diff --git a/Manifest.toml b/Manifest.toml
index ee237c14ebdbcb0c0fa6f47fe6721f7f7949e049..ea0773eb0dde06adeec3d1bba08992dca80fc192 100644
--- a/Manifest.toml
+++ b/Manifest.toml
@@ -461,7 +461,7 @@ uuid = "6218d12a-5da1-5696-b52f-db25d2ecc6d1"
 version = "1.2.1"
 
 [[deps.ImageMagick_jll]]
-deps = ["Artifacts", "Ghostscript_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pkg", "Zlib_jll", "libpng_jll"]
+deps = ["Artifacts", "Ghostscript_jll", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "OpenJpeg_jll", "Pkg", "Zlib_jll", "libpng_jll"]
 git-tree-sha1 = "124626988534986113cfd876e3093e4a03890f58"
 uuid = "c73af94c-d91f-53ed-93a7-00f77d67a9d7"
 version = "6.9.12+3"
@@ -678,6 +678,12 @@ version = "2.36.0+0"
 deps = ["Libdl", "OpenBLAS_jll", "libblastrampoline_jll"]
 uuid = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
 
+[[deps.LittleCMS_jll]]
+deps = ["Artifacts", "JLLWrappers", "JpegTurbo_jll", "Libdl", "Libtiff_jll", "Pkg"]
+git-tree-sha1 = "110897e7db2d6836be22c18bffd9422218ee6284"
+uuid = "d3a379c0-f9a3-5b72-a4c0-6bf4d2e8af0f"
+version = "2.12.0+0"
+
 [[deps.LogExpFunctions]]
 deps = ["DocStringExtensions", "IrrationalConstants", "LinearAlgebra"]
 git-tree-sha1 = "c3ce8e7420b3a6e071e0fe4745f5d4300e37b13f"
@@ -854,6 +860,12 @@ git-tree-sha1 = "a4ca623df1ae99d09bc9868b008262d0c0ac1e4f"
 uuid = "18a262bb-aa17-5467-a713-aee519bc75cb"
 version = "3.1.4+0"
 
+[[deps.OpenJpeg_jll]]
+deps = ["Artifacts", "JLLWrappers", "Libdl", "Libtiff_jll", "LittleCMS_jll", "Pkg", "libpng_jll"]
+git-tree-sha1 = "76374b6e7f632c130e78100b166e5a48464256f8"
+uuid = "643b3616-a352-519d-856d-80112ee9badc"
+version = "2.4.0+0"
+
 [[deps.OpenLibm_jll]]
 deps = ["Artifacts", "Libdl"]
 uuid = "05823500-19ac-5b8b-9628-191a04bc5112"
@@ -936,11 +948,11 @@ version = "0.3.2"
 
 [[deps.PlanarLarvae]]
 deps = ["DelimitedFiles", "HDF5", "JSON3", "LinearAlgebra", "MAT", "Meshes", "OrderedCollections", "Random", "SHA", "StaticArrays", "Statistics", "StatsBase", "StructTypes"]
-git-tree-sha1 = "ff28ccc1ff8bc94478df9d9bfc73a38f2273003e"
+git-tree-sha1 = "ef6169e9f8705569925bef897704c7514b4d5f18"
 repo-rev = "main"
 repo-url = "https://gitlab.pasteur.fr/nyx/planarlarvae.jl"
 uuid = "c2615984-ef14-4d40-b148-916c85b43307"
-version = "0.11.2"
+version = "0.12.0"
 
 [[deps.PlotUtils]]
 deps = ["ColorSchemes", "Colors", "Dates", "PrecompileTools", "Printf", "Random", "Reexport", "Statistics"]
diff --git a/Project.toml b/Project.toml
index b2799a788939137295dd376e10fa2b317a81fa04..42e4c24b473d581cd75cde40a37fd22d420f60b1 100644
--- a/Project.toml
+++ b/Project.toml
@@ -1,7 +1,7 @@
 name = "LarvaTagger"
 uuid = "8b3b36f1-dfed-446e-8561-ea19fe966a4d"
 authors = ["François Laurent", "Institut Pasteur"]
-version = "0.15.2"
+version = "0.16"
 
 [deps]
 Colors = "5ae59095-9a9b-59fe-a467-6f913c188581"
diff --git a/README.md b/README.md
index e102b38f9ab06c8be6c54701c8f92a2bfefcad91..84d9df3189dbda6fc4b491ae840ed01b682a835c 100644
--- a/README.md
+++ b/README.md
@@ -67,17 +67,17 @@ The GUI is provided by a webserver.
 
 Running `LarvaTagger.jl` sets the server up. Once the server is ready, the app can be loaded in a webbrowser at the address indicated (typically `localhost:9284`).
 
-### Using the `larvatagger.jl` script
+### Using the `larvatagger` script
 
-If you cloned the repository, we recommend you run `LarvaTagger.jl` using the `larvatagger.jl` script to be found in the `scripts` directory:
+If you cloned the repository, we recommend you run `LarvaTagger.jl` using the `larvatagger` script to be found in the `scripts` directory:
 
 ```
-scripts/larvatagger.jl open path/to/data/file --browser
+scripts/larvatagger open path/to/data/file --browser
 ```
 
 The script will actually open a Julia interpreter, and give some guidance on how to exit the interpreter.
 
-For now, `larvatagger.jl open` cannot be run with no input arguments. A track data file is required.
+For now, `larvatagger open` cannot be run with no input arguments. A track data file is required.
 
 The `--browser` argument may open a new tab in your webbrowser, but this feature is known to not work in some situations.
 
@@ -85,7 +85,7 @@ The first time the application is loaded, it may take a while for a window in yo
 
 ### From the Julia interpreter
 
-As an alternative to the *larvatagger.jl* script, in the `LarvaTagger` directory created above, launch the Julia interpreter:
+As an alternative to the *larvatagger* script, in the `LarvaTagger` directory created above, launch the Julia interpreter:
 ```
 julia --project=.
 ```
@@ -111,29 +111,32 @@ Otherwise, to let *larvaeditor* know about MaggotUBA or any other backend, in th
 julia> using LarvaTagger; display(larvaeditor("path/to/data/file"; backend_directory="path/to/MaggotUBA's/parent/directory"))
 ```
 
-Similarly, to let *larvatagger.jl* know about MaggotUBA:
+Similarly, to let *larvatagger* know about MaggotUBA:
 ```
-scripts/larvatagger.jl open <path/to/data/file> --backends=<path/to/MaggotUBA's/parent/directory> --browser
+scripts/larvatagger open <path/to/data/file> --backends=<path/to/MaggotUBA's/parent/directory> --browser
 ```
 
-The *larvatagger.jl* script can also be used to train a new tagger:
+The *larvatagger* script can also be used to train a new tagger:
 ```
-scripts/larvatagger.jl train <path/to/backend> <path/to/data/repository> <tagger-name>
+scripts/larvatagger train <path/to/backend> <path/to/data/repository> <tagger-name>
 ```
 and apply this tagger to a tracking data file:
 ```
-scripts/larvatagger.jl predict <path/to/backend> <tagger-name> <path/to/data/file>
+scripts/larvatagger predict <path/to/backend> <tagger-name> <path/to/data/file>
 ```
 
 Note: since `TaggingBackends==0.10`, argument `--skip-make-dataset` is default behavior; pass `--make-dataset` instead to enforce the former default.
 
-To run `larvatagger.jl predict` in parallel on multiple data files using the same tagger, append the `--data-isolation` argument to avoid data conflicts.
-Indeed, without this latter argument, a tagger accesses dedicated locations in the filesystem and these locations would be shared by all the simultaneously running *larvatagger.jl* instances.
+To run `larvatagger predict` in parallel on multiple data files using the same tagger, append the `--data-isolation` argument to avoid data conflicts.
+Indeed, without this latter argument, a tagger accesses dedicated locations in the filesystem and these locations would be shared by all the simultaneously running *larvatagger* instances.
 `--data-isolation` makes the tagger use temporary directories instead, so that these directories are specific to the running process, in addition to be specific to the tagger.
 
+Since version 0.16 with `TaggingBackends>=0.14`, `larvatagger train` admits a `--fine-tune` option to further train an already trained tagger.
+This use case differs from standard training in that it loads a classifier whose output cannot be redesigned. As a consequence, the labels in the training dataset should match those used to train the tagger in the first place.
+
 The `train` and `predict` commands admit more arguments. Check them out with:
 ```
-scripts/larvatagger.jl --help
+scripts/larvatagger --help
 ```
 
 The trained taggers are stored in the `models` directory in the backend.
@@ -149,7 +152,7 @@ See the [release announcements](https://gitlab.pasteur.fr/nyx/larvatagger.jl/-/i
 
 ### CUDA errors
 
-On calling `larvatagger.jl predict` using a MaggotUBA-based tagger, if CUDA complains with messages such as:
+On calling `larvatagger predict` using a MaggotUBA-based tagger, if CUDA complains with messages such as:
 ```
 .../torch/cuda/__init__.py:... Can't initialize NVML
 ```
diff --git a/recipes/README.md b/recipes/README.md
index e18e888ed9020dbb2e5aa5efdd2e8e6a47c1d90a..760df31e5cff4553d7b8b203a446c2171e7ffe17 100644
--- a/recipes/README.md
+++ b/recipes/README.md
@@ -163,7 +163,7 @@ docker pull flaur/larvatagger
 ```
 
 Beware that images that ship with backends are relatively large files (>5GB on disk).
-If you are not interested in automatic tagging, use the `flaur/larvatagger:0.15.2-standalone` image instead.
+If you are not interested in automatic tagging, use the `flaur/larvatagger:0.16-standalone` image instead.
 
 ### Upgrading
 
@@ -191,7 +191,7 @@ The port LarvaTagger.jl listens to must also be exported with `-p 9284:9284` so
 
 ### `train` and `predict`
 
-Just like the *larvatagger.jl* script, the docker image admits more commands/switches, including `import`, `train` and `predict`, or options such as `--help`.
+Just like the *larvatagger* script, the docker image admits more commands/switches, including `import`, `train` and `predict`, or options such as `--help`.
 For these other commands, neither `-i` nor `-p 9284:9284` are necessary.
 
 See the main [README](https://gitlab.pasteur.fr/nyx/larvatagger.jl#automatic-tagging) for usage information.
diff --git a/scripts/larvatagger b/scripts/larvatagger
index 861fc2439238c82f61fda77838fa3a7d3645653c..4c7779ab51cb86d2beac1f77e6875cf3c835b8b0 100755
--- a/scripts/larvatagger
+++ b/scripts/larvatagger
@@ -29,9 +29,10 @@ import|merge|train|predict|--version|-V)
 LarvaTagger
 
 Usage:
-  larvatagger open <file-path> [--backends=<path>] [--port=<number>] [--quiet] [--viewer] [--browser] [--manual-label=<label>]
+  larvatagger open <file-path> [--backends=<path>] [--port=<number>] [--quiet] [--viewer] [--browser] [--manual-label=<label>] [--segment=<t0,t1>]
   larvatagger import <input-path> [<output-file>] [--id=<id>] [--framerate=<fps>] [--pixelsize=<μm>] [--overrides=<comma-separated-list>] [--default-label=<label>] [--manual-label=<label>] [--decode] [--copy-labels]
   larvatagger train <backend-path> <data-path> <model-instance> [--pretrained-model=<instance>] [--labels=<comma-separated-list>] [--sample-size=<N>] [--balancing-strategy=<strategy>] [--class-weights=<csv>] [--manual-label=<label>] [--layers=<N>] [--iterations=<N>] [--seed=<seed>]
+  larvatagger train <backend-path> <data-path> <model-instance> --fine-tune=<instance> [--balancing-strategy=<strategy>] [--manual-label=<label>] [--iterations=<N>] [--seed=<seed>]
   larvatagger predict <backend-path> <model-instance> <data-path> [--output=<filename>] [--make-dataset] [--skip-make-dataset] [--data-isolation]
   larvatagger merge <input-path> <input-file> [<output-file>] [--manual-label=<label>] [--decode]
   larvatagger -V | --version
@@ -55,6 +56,7 @@ Options:
   --layers=<N>         (MaggotUBA) Number of layers of the classifier.
   --iterations=<N>     (MaggotUBA) Number of training iterations (can be two integers separated by a comma).
   --seed=<seed>        Seed for the backend's random number generators.
+  --segment=<t0,t1>    Start and end times (included, comma-separated) for cropping and including tracks.
   --decode             Do not encode the labels into integer indices.
   --copy-labels        Replicate discrete behavior data from the input file.
   --default-label=<label>             Label all untagged data as <label>.
@@ -63,6 +65,7 @@ Options:
   --class-weights=<csv>               Comma-separated list of floats.
   --pretrained-model=<instance>       Name of the pretrained encoder (from `pretrained_models` registry).
   --balancing-strategy=<strategy>     Any of `auto`, `maggotuba`, `none` [default: auto].
+  --fine-tune=<instance>              Load and fine-tune an already trained model.
   --overrides=<comma-separated-list>  Comma-separated list of key:value pairs.
   -o <filename> --output=<filename>   Predicted labels filename.
 
diff --git a/scripts/larvatagger.sh b/scripts/larvatagger.sh
index 4ead861d3a9d64fa5b40f47caf058a82cec0da1c..1156bf286f39d80bad05b9c373aa119b5a009c4c 100755
--- a/scripts/larvatagger.sh
+++ b/scripts/larvatagger.sh
@@ -15,19 +15,21 @@ for _ in $(seq $#); do
       ;;
     --no-rm)
       without_rm=1
+      shift
+      ;;
+    --rm)
+      # default since v0.16
+      shift
       ;;
     *)
-      if [ "$1" = "--rm" ]; then
-        with_rm=1
-      fi
       # note: if DOCKER_ARGS is externally defined, it must end with an explicit space
       DOCKER_ARGS="${DOCKER_ARGS}$1 "
       shift
   esac
 done
 
-if [ -z "$with_rm" -a -z "$without_rm" ]; then
-  echo "Upcoming change: --rm will become default; pass --no-rm to maintain current behavior"
+if [ -z "$without_rm" ]; then
+  DOCKER_ARGS="${DOCKER_ARGS}--rm "
 fi
 
 if [ -z "$docker" ]; then
diff --git a/src/Taggers.jl b/src/Taggers.jl
index 4503192cfdf89b0ebf73cae8368d8055b0615203..0d4cd24331ef6d69266e5545cf1bf4d4aa1e0389 100644
--- a/src/Taggers.jl
+++ b/src/Taggers.jl
@@ -2,7 +2,7 @@ module Taggers
 
 import PlanarLarvae.Formats, PlanarLarvae.Dataloaders
 
-export Tagger, isbackend, resetmodel, resetdata, train, predict
+export Tagger, isbackend, resetmodel, resetdata, train, predict, finetune
 
 struct Tagger
     backend_dir::String
@@ -230,4 +230,20 @@ function predict(tagger::Tagger; kwargs...)
     run(Cmd(`poetry run tagging-backend predict $(args)`; dir=tagger.backend_dir))
 end
 
+function finetune(tagger::Tagger; original_instance=nothing, kwargs...)
+    args = ["--model-instance", tagger.model_instance]
+    if !isnothing(original_instance)
+        push!(args, "--original-model-instance")
+        push!(args, original_instance)
+    end
+    if !isnothing(tagger.sandbox)
+        push!(args, "--sandbox")
+        push!(args, tagger.sandbox)
+    end
+    parsekwargs!(args, kwargs)
+    ret = run(Cmd(`poetry run tagging-backend finetune $(args)`; dir=tagger.backend_dir))
+    @assert isdir(modeldir(tagger))
+    return ret
+end
+
 end # module
diff --git a/src/cli_base.jl b/src/cli_base.jl
index 7ab730eb063143e1c2aef4a3271379ffaee17843..bb6506766b1b65f26bd5d06a89fc6c6292c800b4 100644
--- a/src/cli_base.jl
+++ b/src/cli_base.jl
@@ -5,6 +5,7 @@ function import′(infile, outfile;
         defaultlabel=nothing,
         decode=false,
         copylabels=false,
+        segment=nothing,
         kwargs...)
     file = load(infile; kwargs...)
     #
@@ -67,6 +68,18 @@ function import′(infile, outfile;
     if !decode
         encodelabels!(run)
     end
+    if !isnothing(segment)
+        if file isa Formats.Trxmat && !copylabels
+            @warn "Cropping empty tracks; did you forget --copy-labels?"
+        end
+        if segment isa String || segment isa SubString
+            segment = map(split(segment, ',')) do t
+                parse(Float64, t)
+            end
+        end
+        t0, t1 = segment
+        run = Datasets.segment(run, t0, t1)
+    end
     if isnothing(outfile)
         Datasets.write_json(stdout, run)
     else
diff --git a/src/cli_toolkit.jl b/src/cli_toolkit.jl
index 8a85b8909fb059243f4e1941c9f2667cb2bd99be..371d4a44047ef962d056504d6820deacfa061f54 100644
--- a/src/cli_toolkit.jl
+++ b/src/cli_toolkit.jl
@@ -15,8 +15,9 @@ export main
 usage = """Larva Tagger.
 
 Usage:
-  larvatagger-toolkit.jl import <input-path> [<output-file>] [--id=<id>] [--framerate=<fps>] [--pixelsize=<μm>] [--overrides=<comma-separated-list>] [--default-label=<label>] [--manual-label=<label>] [--decode] [--copy-labels]
+  larvatagger-toolkit.jl import <input-path> [<output-file>] [--id=<id>] [--framerate=<fps>] [--pixelsize=<μm>] [--overrides=<comma-separated-list>] [--default-label=<label>] [--manual-label=<label>] [--decode] [--copy-labels] [--segment=<t0,t1>]
   larvatagger-toolkit.jl train <backend-path> <data-path> <model-instance> [--pretrained-model=<instance>] [--labels=<comma-separated-list>] [--sample-size=<N>] [--balancing-strategy=<strategy>] [--class-weights=<csv>] [--manual-label=<label>] [--layers=<N>] [--iterations=<N>] [--seed=<seed>]
+  larvatagger-toolkit.jl train <backend-path> <data-path> <model-instance> --fine-tune=<instance> [--balancing-strategy=<strategy>] [--manual-label=<label>] [--iterations=<N>] [--seed=<seed>]
   larvatagger-toolkit.jl predict <backend-path> <model-instance> <data-path> [--output=<filename>] [--make-dataset] [--skip-make-dataset] [--data-isolation]
   larvatagger-toolkit.jl merge <input-path> <input-file> [<output-file>] [--manual-label=<label>] [--decode]
   larvatagger-toolkit.jl -V | --version
@@ -33,8 +34,9 @@ Options:
   --data-isolation     Isolate the backend data directories for parallel tagging of multiple data files.
   --sample-size=<N>    Sample only N track segments from the data repository.
   --layers=<N>         (MaggotUBA) Number of layers of the classifier.
-  --iterations=<N>     (MaggotUBA) Number of training iterations (can be two integers separated by a comma).
+  --iterations=<N>     Number of training iterations (integer or comma-separated list of integers).
   --seed=<seed>        Seed for the backend's random number generators.
+  --segment=<t0,t1>    Start and end times (included, comma-separated) for cropping and including tracks.
   --decode             Do not encode the labels into integer indices.
   --copy-labels        Replicate discrete behavior data from the input file.
   --default-label=<label>             Label all untagged data as <label>.
@@ -43,6 +45,7 @@ Options:
   --class-weights=<csv>               Comma-separated list of floats.
   --pretrained-model=<instance>       Name of the pretrained encoder (from `pretrained_models` registry).
   --balancing-strategy=<strategy>     Any of `auto`, `maggotuba`, `none` [default: auto].
+  --fine-tune=<instance>              Load and fine-tune an already trained model.
   --overrides=<comma-separated-list>  Comma-separated list of key:value pairs.
   -o <filename> --output=<filename>   Predicted labels filename.
 
@@ -78,6 +81,10 @@ Commands:
     <data-path> can be a path to a file or directory.
     --class-weights requires --labels to be defined and the specified comma-separated values
     should match those given by --labels.
+    --fine-tune acts like a child switch and leads to separate logic. Class labels and
+    weights are inherited from the model instance to further train. Underrepresented classes
+    cannot be excluded, but the corresponding track points can be unlabelled so that
+    these classes are simply missing from the training data.
 
   predict   Automatically label tracking data.
 
@@ -122,6 +129,7 @@ function main(args=ARGS; exit_on_error=true)
         kwargs[:defaultlabel] = parsed_args["--default-label"]
         kwargs[:decode] = parsed_args["--decode"]
         kwargs[:copylabels] = parsed_args["--copy-labels"]
+        kwargs[:segment] = parsed_args["--segment"]
         import′(infile, outfile; kwargs...)
 
     elseif parsed_args["merge"]
@@ -144,22 +152,32 @@ function main(args=ARGS; exit_on_error=true)
         tagger = Tagger(backend_path, model_instance)
         Taggers.reset(tagger)
         Taggers.push(tagger, data_path)
+        # common arguments
         kwargs = Dict{Symbol, Any}()
-        layers = parsed_args["--layers"]
-        isnothing(layers) || (kwargs[:layers] = layers)
+        kwargs[:balancing_strategy] = parsed_args["--balancing-strategy"]
+        kwargs[:include_all] = parsed_args["--manual-label"]
+        # additional arguments whose default value is not `nothing`
         iterations = parsed_args["--iterations"]
         isnothing(iterations) || (kwargs[:iterations] = iterations)
-        classweights = parsed_args["--class-weights"]
-        isnothing(classweights) || (kwargs[:class_weights] = classweights)
         seed = parsed_args["--seed"]
         isnothing(seed) || (kwargs[:seed] = seed)
-        train(tagger;
-              pretrained_instance=parsed_args["--pretrained-model"],
-              labels=parsed_args["--labels"],
-              sample_size=parsed_args["--sample-size"],
-              balancing_strategy=parsed_args["--balancing-strategy"],
-              include_all=parsed_args["--manual-label"],
-              kwargs...)
+        #
+        finetune_model = parsed_args["--fine-tune"]
+        if isnothing(finetune_model) # standard train
+            kwargs[:pretrained_instance] = parsed_args["--pretrained-model"]
+            kwargs[:sample_size] = parsed_args["--sample-size"]
+            kwargs[:labels] = parsed_args["--labels"]
+            #
+            layers = parsed_args["--layers"]
+            isnothing(layers) || (kwargs[:layers] = layers)
+            classweights = parsed_args["--class-weights"]
+            isnothing(classweights) || (kwargs[:class_weights] = classweights)
+            #
+            train(tagger; kwargs...)
+        else # fine-tuning
+            kwargs[:original_instance] = finetune_model
+            finetune(tagger; kwargs...)
+        end
 
     elseif parsed_args["predict"]
         backend_path = parsed_args["<backend-path>"]
diff --git a/test/deploy_and_test.sh b/test/deploy_and_test.sh
index 0e8d0dbd4b8e34246f04af2dd6ff9f023a1647d2..71e3406660068220d960b8a8175a82e5cb94a681 100755
--- a/test/deploy_and_test.sh
+++ b/test/deploy_and_test.sh
@@ -37,7 +37,7 @@ if ! julia -v &> /dev/null; then
   # do NOT use juliaup to install Julia
   module load Python &> /dev/null # Maestro
   python3 -m pip install jill
-  python3 -m jill install -v 1.9.0 -c
+  python3 -m jill install -v 1.9.1 -c
 fi
 
 # h5diff (for tests) on Maestro
diff --git a/test/predict_and_retrain.sh b/test/predict_and_retrain.sh
index e72d981d6597c39f6b1c004037abee40dd2e63d8..ebcaa6245fb92404228493b20b4a3976a74e4bc9 100755
--- a/test/predict_and_retrain.sh
+++ b/test/predict_and_retrain.sh
@@ -34,7 +34,7 @@ head -n30 "${DATADIR1}/predicted.label"
 echo '...'
 
 # train a new tagger with the 2-data-file repository
-./larvatagger.sh train "data" newtagger
+./larvatagger.sh train "data" newtagger --iterations 10
 
 cat models/newtagger/clf_config.json
 
diff --git a/test/scenarii.sh b/test/scenarii.sh
index 43bf428a766716e9bb6d8ddafa49c882850ce219..59a96d49a14ad72fc414f258b986496c9cd975a9 100755
--- a/test/scenarii.sh
+++ b/test/scenarii.sh
@@ -68,6 +68,7 @@ prepareTrainingData() {
   echo $tmpdir
 }
 
+# requires: partial_predictions.label
 testImportLabelFile() {
   filename=result_imported.label
   # run
@@ -80,6 +81,20 @@ testImportLabelFile() {
   rm -f "$datapath/$filename"
 }
 
+# requires: cropped.label
+testCropTracks() {
+  filename=result_cropped.label
+  # run
+  cd "$project_root"
+  echo "\"$larvataggerjl\" import \"$datadir/trx.mat\" $filename --copy-labels --segment=59,62"
+  "$larvataggerjl" import "$datadir/trx.mat" $filename --copy-labels --segment=59,62
+  # test
+  assertTrue "\`import\` failed to reproduce the cropped.label file" '$(cmp "$datapath/cropped.label" "$datapath/$filename")'
+  # clean up
+  rm -f "$datapath/$filename"
+}
+
+# requires: sample.spine sample.outline test_train_default/predicted.label
 testPredictDefault() {
   tagger="test_train_default$tagger_suffix"
   tmpdir=$(prepareTestData $tagger)
@@ -101,50 +116,55 @@ testPredictDefault() {
   assertTrue "\`predict\` failed to reproduce file $filename" '$(cmp "$expected_labels" "$predictions")'
 }
 
+# requires: sample.spine sample.outline original_predictions.label test_train_default/*
 testTrainDefault() {
   tagger="test_train_default$tagger_suffix"
   tmpdir=$(prepareTrainingData original_predictions.label $tagger)
   # run
   cd "$project_root"
-  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger"
+  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --seed $seed"
   "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --seed $seed
   # test
   postTrain $tagger
 }
 
+# requires: sample.spine sample.outline imported.label test_train_one_class/*
 testTrainOneClass() {
   tagger="test_train_one_class$tagger_suffix"
   tmpdir=$(prepareTrainingData imported.label $tagger)
   # run
   cd "$project_root"
-  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --seed $seed --labels=\"back-up,not back-up\""
-  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --seed $seed --labels="back-up,not back-up"
+  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --iterations 10 --seed $seed --labels=\"back-up,not back-up\""
+  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --iterations 10 --seed $seed --labels="back-up,not back-up"
   # test
   postTrain $tagger
 }
 
+# requires: sample.spine sample.outline imported.label test_train_one_class_with_weights/*
 testTrainOneClassWithWeights() {
   tagger="test_train_one_class_with_weights$tagger_suffix"
   tmpdir=$(prepareTrainingData imported.label $tagger)
   # run (compared with testTrainOneClass, we also swap the labels)
   cd "$project_root"
-  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --seed $seed --labels=\"not back-up,back-up\"" --class-weights 1,10
-  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --seed $seed --labels="not back-up,back-up" --class-weights 1,10
+  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --iterations 10 --seed $seed --labels=\"not back-up,back-up\"" --class-weights 1,10
+  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --iterations 10 --seed $seed --labels="not back-up,back-up" --class-weights 1,10
   # test
   postTrain $tagger
 }
 
+# requires: sample.spine sample.outline gui_imported.label test_train_one_class_with_encoder/*
 testTrainOneClassWithEncoder() {
   tagger="test_train_one_class_with_encoder$tagger_suffix"
   tmpdir=$(prepareTrainingData gui_imported.label $tagger trx.mat)
   # run
   cd "$project_root"
-  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --seed $seed --labels=\"hunch,¬hunch\" --pretrained-model=20230524-hunch-25 --balancing-strategy=maggotuba"
-  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --seed $seed --labels="hunch,¬hunch" --pretrained-model=20230524-hunch-25 --balancing-strategy=maggotuba
+  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir\" $tagger --iterations 10 --seed $seed --labels=\"hunch,¬hunch\" --pretrained-model=20230524-hunch-25 --balancing-strategy=maggotuba"
+  "$larvataggerjl" train "$maggotuba" "$tmpdir" $tagger --iterations 10 --seed $seed --labels="hunch,¬hunch" --pretrained-model=20230524-hunch-25 --balancing-strategy=maggotuba
   # test
   postTrain $tagger
 }
 
+# requires: sample.spine sample.outline trx.mat gui_imported.label original_predictions.label test_train_selected_files/*
 testTrainSelectedFiles() {
   tagger="test_train_selected_files$tagger_suffix"
   tmpdir=$(prepareTrainingData gui_imported.label $tagger trx.mat original_predictions.label partial_predictions.label)
@@ -153,21 +173,22 @@ testTrainSelectedFiles() {
   (cd "$tmpdir" && find . -name gui_imported.label -print > filelist.txt)
   # run
   cd "$tmpdir"
-  echo "\"$project_root/$larvataggerjl\" train \"$project_root/$maggotuba\" filelist.txt $tagger --seed $seed --labels=\"run_large,cast_large,hunch_large\" --balancing-strategy=maggotuba --iterations=100"
-  "$project_root/$larvataggerjl" train "$project_root/$maggotuba" filelist.txt $tagger --seed $seed --labels="run_large,cast_large,hunch_large" --balancing-strategy=maggotuba --iterations=100
+  echo "\"$project_root/$larvataggerjl\" train \"$project_root/$maggotuba\" filelist.txt $tagger --seed $seed --labels=\"run_large,cast_large,hunch_large\" --balancing-strategy=maggotuba --iterations=10"
+  "$project_root/$larvataggerjl" train "$project_root/$maggotuba" filelist.txt $tagger --seed $seed --labels="run_large,cast_large,hunch_large" --balancing-strategy=maggotuba --iterations=10
   # test
   cd "$project_root"
   postTrain $tagger
 }
 
+# requires: sample.spine sample.outline trx.mat gui_imported.label original_predictions.label test_train_recursive_selection/*
 testTrainRecursiveSelection() {
   tagger="test_train_recursive_selection$tagger_suffix"
   tmpdir=$(prepareTrainingData gui_imported.label $tagger trx.mat original_predictions.label partial_predictions.label)
   mkdir "$tmpdir/subdir"; mv $tmpdir/*.label "$tmpdir/subdir/"; mv $tmpdir/trx.mat "$tmpdir/subdir/"
   # run
   cd "$project_root"
-  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir/**/gui_imported.label\" $tagger --seed $seed --labels=\"run_large,cast_large,hunch_large\" --balancing-strategy=maggotuba --iterations=100"
-  "$larvataggerjl" train "$maggotuba" "$tmpdir/**/gui_imported.label" $tagger --seed $seed --labels="run_large,cast_large,hunch_large" --balancing-strategy=maggotuba --iterations=100
+  echo "\"$larvataggerjl\" train \"$maggotuba\" \"$tmpdir/**/gui_imported.label\" $tagger --seed $seed --labels=\"run_large,cast_large,hunch_large\" --balancing-strategy=maggotuba --iterations=10"
+  "$larvataggerjl" train "$maggotuba" "$tmpdir/**/gui_imported.label" $tagger --seed $seed --labels="run_large,cast_large,hunch_large" --balancing-strategy=maggotuba --iterations=10
   # test
   postTrain $tagger
 }