Update metadata (#5)
Commit: e7d2f611d72bda7f9ea2685d9d66013793044ded
README.md (CHANGED)

@@ -7,7 +7,7 @@ language_creators:
 language:
 - en
 license:
-- cc-by-4.0
+- cc-by-sa-4.0
 multilinguality:
 - monolingual
 size_categories:
@@ -87,12 +87,13 @@ configs:
 - **Homepage:** https://yale-lily.github.io/spider
 - **Repository:** https://github.com/taoyds/spider
 - **Paper:** https://www.aclweb.org/anthology/D18-1425/
+- **Paper:** https://arxiv.org/abs/1809.08887
 - **Point of Contact:** [Yale LILY](https://yale-lily.github.io/)
 
 ### Dataset Summary
 
-Spider is a large-scale complex and cross-domain semantic parsing and text-to-SQL dataset annotated by 11 Yale students
-The goal of the Spider challenge is to develop natural language interfaces to cross-domain databases
+Spider is a large-scale complex and cross-domain semantic parsing and text-to-SQL dataset annotated by 11 Yale students.
+The goal of the Spider challenge is to develop natural language interfaces to cross-domain databases.
 
 ### Supported Tasks and Leaderboards
 
@@ -186,11 +187,35 @@ the [CC BY-SA 4.0](https://creativecommons.org/licenses/by-sa/4.0/legalcode)
 ### Citation Information
 
 ```
-@
-
-
-
-
+@inproceedings{yu-etal-2018-spider,
+    title = "{S}pider: A Large-Scale Human-Labeled Dataset for Complex and Cross-Domain Semantic Parsing and Text-to-{SQL} Task",
+    author = "Yu, Tao and
+      Zhang, Rui and
+      Yang, Kai and
+      Yasunaga, Michihiro and
+      Wang, Dongxu and
+      Li, Zifan and
+      Ma, James and
+      Li, Irene and
+      Yao, Qingning and
+      Roman, Shanelle and
+      Zhang, Zilin and
+      Radev, Dragomir",
+    editor = "Riloff, Ellen and
+      Chiang, David and
+      Hockenmaier, Julia and
+      Tsujii, Jun{'}ichi",
+    booktitle = "Proceedings of the 2018 Conference on Empirical Methods in Natural Language Processing",
+    month = oct # "-" # nov,
+    year = "2018",
+    address = "Brussels, Belgium",
+    publisher = "Association for Computational Linguistics",
+    url = "https://aclanthology.org/D18-1425",
+    doi = "10.18653/v1/D18-1425",
+    pages = "3911--3921",
+    archivePrefix={arXiv},
+    eprint={1809.08887},
+    primaryClass={cs.CL},
 }
 ```
 
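For readers who land on this card and want to try the data the updated summary describes, a minimal sketch with the Hugging Face `datasets` library follows. The Hub id `spider` and the field names `db_id`, `question`, and `query` are assumptions drawn from the dataset card rather than anything this commit establishes, so treat the snippet as illustrative only.

```python
# Minimal sketch: load the Spider dataset from the Hugging Face Hub and inspect
# one training example. The repo id "spider" and the field names below are
# assumptions based on this dataset card, not guarantees made by this commit.
from datasets import load_dataset

spider = load_dataset("spider")   # typically exposes "train" and "validation" splits
example = spider["train"][0]

# Each record is expected to pair a natural-language question with its SQL query
# over one of the cross-domain databases (identified by `db_id`).
print(example.get("db_id"))
print(example.get("question"))
print(example.get("query"))
```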