diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5af564cce24bf359552480dc352d7bfa54912d3d..0cd12e7eb71a3f1b6e56300a2e8497e0aba17d8c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -295,7 +295,7 @@ lint:
       df-wiki-cli meilisearch 
       --host ${MEILI_HOST}
       --key "${MEILI_MASTER_KEY}"
-      index-update refseqsanitized index
+      index-update refseqsanitized sys_id
     - >
       df-wiki-cli
       meilisearch 
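The CI job now passes `sys_id` as the second positional argument of `index-update`, matching the `primary_key` parameter introduced in the Python change below. A hedged sketch of how the typer command might forward that argument — the actual command wiring is not part of this diff; only `update_refseq`'s new signature is confirmed, and the `--file` option name is a placeholder:

```python
# Hypothetical wiring of the `index-update` command; the real definition is not
# shown in this diff. Only update_refseq(client, document, file, primary_key="sys_id")
# is confirmed by the change below.
import meilisearch
import typer

from df_wiki_cli.meilisearch.update.main import update_refseq

app = typer.Typer()


@app.command("index-update")
def index_update(
    ctx: typer.Context,
    document: str,                                      # e.g. "refseqsanitized"
    primary_key: str = typer.Argument("sys_id"),        # second positional arg from the CI job
    file: str = typer.Option("refseq.csv", "--file"),   # placeholder option name and default
):
    # Reuse the host/key stored on the context object, as the existing commands do.
    client = meilisearch.Client(ctx.obj.host, ctx.obj.key)
    update_refseq(client, document, file, primary_key=primary_key)
```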
diff --git a/packages/df-wiki-cli/df_wiki_cli/meilisearch/main.py b/packages/df-wiki-cli/df_wiki_cli/meilisearch/main.py
index b7bbffdb4d84331f6438652cc2777b0d5ffa4dc8..ddf75daf53df00b9b532fa119a88f0d41b966ecd 100644
--- a/packages/df-wiki-cli/df_wiki_cli/meilisearch/main.py
+++ b/packages/df-wiki-cli/df_wiki_cli/meilisearch/main.py
@@ -52,7 +52,8 @@ def main(
 def delete_all_documents(ctx: typer.Context, id: str):
     client = meilisearch.Client(ctx.obj.host, ctx.obj.key)
     index = client.index(id)
-    index.delete_all_documents()
+    tasks = index.delete_all_documents()
+    console.print(tasks)
 
 
 @app.command()
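Capturing the return value of `delete_all_documents()` makes the enqueued task visible in the CLI output instead of discarding it. A minimal sketch of how that task object could also be awaited, assuming the meilisearch Python client where the call returns a `TaskInfo`; the host, key, and index name below are placeholders:

```python
# Sketch only: deletion is asynchronous in Meilisearch, so the returned task
# can be printed and, if needed, waited on before continuing.
import meilisearch

client = meilisearch.Client("http://localhost:7700", "masterKey")
index = client.index("refseqsanitized")

task = index.delete_all_documents()   # enqueued asynchronously; returns a TaskInfo
print(task.task_uid, task.status)     # e.g. 42 "enqueued"

# Optionally block until Meilisearch has actually processed the deletion.
client.wait_for_task(task.task_uid)
```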
diff --git a/packages/df-wiki-cli/df_wiki_cli/meilisearch/update/main.py b/packages/df-wiki-cli/df_wiki_cli/meilisearch/update/main.py
index e23eae193be0b8ac0dec01fc902903c227782011..ef1e2c20874558e25e177200f050a73932d9af9e 100644
--- a/packages/df-wiki-cli/df_wiki_cli/meilisearch/update/main.py
+++ b/packages/df-wiki-cli/df_wiki_cli/meilisearch/update/main.py
@@ -523,7 +523,7 @@ def split_on_comma(str_val: Optional[str]) -> Optional[List[str]]:
         return None
 
 
-def update_refseq(client, document, file):
+def update_refseq(client, document, file, primary_key="sys_id"):
     index = client.index(document.lower())
     documents = []
     with open(file, "r") as csvfile:
@@ -536,7 +536,7 @@ def update_refseq(client, document, file):
             row["accession_in_sys"] = split_on_comma(row["accession_in_sys"])
             doc = RefSeqCsv(**row)
             documents.append(doc.model_dump(by_alias=True))
-        tasks = index.add_documents_in_batches(documents, primary_key="sys_id")
+        tasks = index.add_documents_in_batches(documents, primary_key=primary_key)
         for task in tasks:
             console.print(task)
     index.update_pagination_settings({"maxTotalHits": 1000000})
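With `primary_key` exposed as a parameter (defaulting to `"sys_id"`, the value previously hard-coded), callers can index the same CSV under a different document key without touching the helper. A short usage sketch; the client credentials, file path, and the alternative field name are illustrative placeholders:

```python
# Usage sketch — everything except update_refseq's signature is a placeholder.
import meilisearch

from df_wiki_cli.meilisearch.update.main import update_refseq

client = meilisearch.Client("http://localhost:7700", "masterKey")

# Default keeps the previous behaviour: documents keyed on "sys_id".
update_refseq(client, "refseqsanitized", "refseq.csv")

# The key field can now be overridden explicitly (hypothetical field name).
update_refseq(client, "refseqsanitized", "refseq.csv", primary_key="my_id_field")
```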