gsaltintas committed
Commit dcc0964 · verified · 1 Parent(s): 8bcb964

Uploading tokenizer_robustness_completion_italian_contractions subset

README.md CHANGED
@@ -512,6 +512,132 @@ dataset_info:
     num_examples: 39
   download_size: 40662
   dataset_size: 21915
+- config_name: tokenizer_robustness_completion_italian_contractions
+  features:
+  - name: question
+    dtype: string
+  - name: choices
+    list: string
+  - name: answer
+    dtype: int64
+  - name: answer_label
+    dtype: string
+  - name: split
+    dtype: string
+  - name: subcategories
+    dtype: string
+  - name: category
+    dtype: string
+  - name: lang
+    dtype: string
+  - name: second_lang
+    dtype: string
+  - name: notes
+    dtype: string
+  - name: id
+    dtype: string
+  - name: set_id
+    dtype: string
+  - name: variation_id
+    dtype: string
+  - name: perturbed_word
+    dtype: string
+  - name: vanilla_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: trimmed_cos_sim_to_canonical
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: float64
+    - name: Qwen/Qwen3-8B
+      dtype: float64
+    - name: bigscience/bloom
+      dtype: float64
+    - name: common-pile/comma-v0.1-1t
+      dtype: float64
+    - name: facebook/xglm-564M
+      dtype: float64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: float64
+    - name: google/byt5-small
+      dtype: float64
+    - name: google/gemma-2-2b
+      dtype: float64
+    - name: gpt2
+      dtype: float64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: float64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: float64
+    - name: mistralai/tekken
+      dtype: float64
+    - name: tiktoken/gpt-4o
+      dtype: float64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: float64
+  - name: token_counts
+    struct:
+    - name: CohereLabs/aya-expanse-8b
+      dtype: int64
+    - name: Qwen/Qwen3-8B
+      dtype: int64
+    - name: bigscience/bloom
+      dtype: int64
+    - name: common-pile/comma-v0.1-1t
+      dtype: int64
+    - name: facebook/xglm-564M
+      dtype: int64
+    - name: google-bert/bert-base-multilingual-cased
+      dtype: int64
+    - name: google/byt5-small
+      dtype: int64
+    - name: google/gemma-2-2b
+      dtype: int64
+    - name: gpt2
+      dtype: int64
+    - name: meta-llama/Llama-3.2-1B
+      dtype: int64
+    - name: microsoft/Phi-3-mini-4k-instruct
+      dtype: int64
+    - name: mistralai/tekken
+      dtype: int64
+    - name: tiktoken/gpt-4o
+      dtype: int64
+    - name: tokenmonster/englishcode-32000-consistent-v1
+      dtype: int64
+  splits:
+  - name: test
+    num_bytes: 12086
+    num_examples: 21
+  download_size: 33858
+  dataset_size: 12086
 configs:
 - config_name: tokenizer_robustness_completion_italian_abbreviations
   data_files:
@@ -529,6 +655,10 @@ configs:
   data_files:
   - split: test
     path: tokenizer_robustness_completion_italian_code_language_script_switching/test-*
+- config_name: tokenizer_robustness_completion_italian_contractions
+  data_files:
+  - split: test
+    path: tokenizer_robustness_completion_italian_contractions/test-*
 ---
 
 # Dataset Card for Tokenization Robustness
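Once the metadata above is merged, the new subset is addressable by its config name. Below is a minimal loading sketch, assuming a Hub repository id like `gsaltintas/tokenizer-robustness` (the actual repository path is not shown in this commit) and the `datasets` library:

```python
from datasets import load_dataset

# "gsaltintas/tokenizer-robustness" is a placeholder repo id; substitute the real Hub path.
ds = load_dataset(
    "gsaltintas/tokenizer-robustness",
    "tokenizer_robustness_completion_italian_contractions",
    split="test",
)

print(ds.num_rows)  # 21 examples, per the splits metadata in this config

example = ds[0]
print(example["question"], example["choices"], example["answer_label"])

# Per-tokenizer diagnostics are struct columns keyed by model/tokenizer name.
print(example["token_counts"]["gpt2"])
print(example["vanilla_cos_sim_to_canonical"]["Qwen/Qwen3-8B"])
```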
tokenizer_robustness_completion_italian_contractions/test-00000-of-00001.parquet CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3d8c056fa0d800bcf8f71a065be0dd547fd08958dd402dcdc345a8fb16ce7d1c
-size 33865
+oid sha256:65a22dc588893e14de45bee5e43ce6cd95f99bca412a0a1d6a6c411ffc3ed45a
+size 33858