Dataset metadata:

  • Languages: English
  • Multilinguality: monolingual
  • Size Categories: 1K<n<10K
  • Language Creators: found
  • Annotations Creators: crowdsourced
  • Source Datasets: original
  • License: cc-by-4.0

Dataset Card for "wnut_17"

Dataset Summary

WNUT 17: Emerging and Rare entity recognition

This shared task focuses on identifying unusual, previously-unseen entities in the context of emerging discussions. Named entities form the basis of many modern approaches to other tasks (like event clustering and summarization), but recall on them is a real problem in noisy text, even among annotators. This drop tends to be due to novel entities and surface forms. Take for example the tweet "so.. kktny in 30 mins?": even human experts find the entity "kktny" hard to detect and resolve. This task evaluates the ability to detect and classify novel, emerging, singleton named entities in noisy text.

The goal of this task is to provide a definition of emerging and of rare entities, and based on that, also datasets for detecting these entities.

Supported Tasks and Leaderboards

The dataset supports token classification, specifically named entity recognition (NER) on noisy user-generated text.

Languages

The text in the dataset is in English (BCP-47: en).

Dataset Structure

Data Instances

  • Size of downloaded dataset files: 0.80 MB
  • Size of the generated dataset: 1.74 MB
  • Total amount of disk used: 2.55 MB

An example of 'train' looks as follows.

{
    "id": "0",
    "ner_tags": [0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 8, 8, 0, 7, 0, 0, 0, 0, 0, 0, 0, 0],
    "tokens": ["@paulwalk", "It", "'s", "the", "view", "from", "where", "I", "'m", "living", "for", "two", "weeks", ".", "Empire", "State", "Building", "=", "ESB", ".", "Pretty", "bad", "storm", "here", "last", "evening", "."]
}
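
The example above can be reproduced with the Hugging Face datasets library. A minimal sketch, assuming the library is installed:

from datasets import load_dataset

# Downloads and caches all three splits as a DatasetDict.
dataset = load_dataset("wnut_17")
example = dataset["train"][0]
print(example["id"])          # "0"
print(example["tokens"][:5])  # ['@paulwalk', 'It', "'s", 'the', 'view']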

Data Fields

The data fields are the same among all splits:

  • id (string): ID of the example.
  • tokens (list of string): Tokens of the example text.
  • ner_tags (list of class labels): NER tags of the tokens (using IOB2 format), with possible values:
    • 0: O
    • 1: B-corporation
    • 2: I-corporation
    • 3: B-creative-work
    • 4: I-creative-work
    • 5: B-group
    • 6: I-group
    • 7: B-location
    • 8: I-location
    • 9: B-person
    • 10: I-person
    • 11: B-product
    • 12: I-product
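
Because the tags use the IOB2 scheme, a B- tag opens an entity and any following I- tags of the same type extend it. Below is a small sketch of decoding integer tags back into labelled spans; the helper extract_entities is illustrative only, not part of the datasets API:

from datasets import load_dataset

ds = load_dataset("wnut_17", split="train")
# The Sequence(ClassLabel) feature maps integer tags to strings, e.g. 7 -> "B-location".
label_names = ds.features["ner_tags"].feature.names

def extract_entities(tokens, tag_ids):
    """Merge IOB2 token tags into (entity_text, entity_type) spans."""
    entities, span, span_type = [], [], None
    for token, tag_id in zip(tokens, tag_ids):
        label = label_names[tag_id]
        if label.startswith("B-"):
            if span:
                entities.append((" ".join(span), span_type))
            span, span_type = [token], label[2:]
        elif label.startswith("I-") and span_type == label[2:]:
            span.append(token)
        else:  # "O", or an I- tag that does not continue the open span
            if span:
                entities.append((" ".join(span), span_type))
            span, span_type = [], None
    if span:
        entities.append((" ".join(span), span_type))
    return entities

ex = ds[0]
print(extract_entities(ex["tokens"], ex["ner_tags"]))
# [('Empire State Building', 'location'), ('ESB', 'location')]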

Data Splits

  • train: 3394 examples
  • validation: 1009 examples
  • test: 1287 examples
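
The split sizes can be verified programmatically; a short sketch, again assuming the datasets library:

from datasets import load_dataset

dataset = load_dataset("wnut_17")
for split_name, split in dataset.items():
    print(split_name, len(split))
# train 3394
# validation 1009
# test 1287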

Dataset Creation

Curation Rationale

More Information Needed

Source Data

Initial Data Collection and Normalization

More Information Needed

Who are the source language producers?

According to the dataset tags above, the source language data was found, i.e. collected from existing user-generated text rather than created for the task.

Annotations

Annotation process

More Information Needed

Who are the annotators?

According to the dataset tags above, the annotations were crowdsourced.

Personal and Sensitive Information

More Information Needed

Considerations for Using the Data

Social Impact of Dataset

More Information Needed

Discussion of Biases

More Information Needed

Other Known Limitations

More Information Needed

Additional Information

Dataset Curators

More Information Needed

Licensing Information

The dataset is licensed under the Creative Commons Attribution 4.0 International license (cc-by-4.0), as stated in the dataset tags above.

Citation Information

@inproceedings{derczynski-etal-2017-results,
    title = "Results of the {WNUT}2017 Shared Task on Novel and Emerging Entity Recognition",
    author = "Derczynski, Leon  and
      Nichols, Eric  and
      van Erp, Marieke  and
      Limsopatham, Nut",
    booktitle = "Proceedings of the 3rd Workshop on Noisy User-generated Text",
    month = sep,
    year = "2017",
    address = "Copenhagen, Denmark",
    publisher = "Association for Computational Linguistics",
    url = "https://www.aclweb.org/anthology/W17-4418",
    doi = "10.18653/v1/W17-4418",
    pages = "140--147",
    abstract = "This shared task focuses on identifying unusual, previously-unseen entities in the context of emerging discussions.
                Named entities form the basis of many modern approaches to other tasks (like event clustering and summarization),
                but recall on them is a real problem in noisy text - even among annotators.
                This drop tends to be due to novel entities and surface forms.
                Take for example the tweet {``}so.. kktny in 30 mins?!{''} {--} even human experts find the entity {`}kktny{'}
                hard to detect and resolve. The goal of this task is to provide a definition of emerging and of rare entities,
                and based on that, also datasets for detecting these entities. The task as described in this paper evaluated the
                ability of participating entries to detect and classify novel and emerging named entities in noisy text.",
}

Contributions

Thanks to @thomwolf, @lhoestq, @stefan-it, @lewtun, @jplu for adding this dataset.
