From e2f5c415b7132ca9d16aafe828e99dd6758021bb Mon Sep 17 00:00:00 2001
From: fmareuil <fabien.mareuil@pasteur.fr>
Date: Fri, 29 Sep 2023 19:07:13 +0200
Subject: [PATCH] add API update method
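
Serve static and media files through a new nginx service in
docker-compose.yaml; add a build_distances management command that
imports cavity descriptors, computes k-nearest-neighbor distances
between cavities and stores aggregate statistics in MetaInformation;
add filtering query parameters (code, uniprot_id, pdb_id, chain,
ligand, label, full_name) to the target-centric API viewsets so
existing records can be looked up and updated through the API; and add
an ExpiringTokenAuthentication class that deletes tokens older than
settings.TOKEN_EXPIRED_AFTER_SECONDS.

Example lookup through the API (illustrative URL; the actual route
prefix depends on the project's URL configuration):

    curl -H "Authorization: Token <key>" \
         "http://localhost:8000/api/cavity/?full_name=3spf-A-B50-501_CAVITY_N1"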

---
 docker-compose.yaml                           |  16 +
 .../management/commands/build_distances.py    | 532 ++++++++++++++++++
 ...ty_options_alter_chain_options_and_more.py | 269 +++++++++
 ippisite/ippidb/models/targetcentric.py       |  26 +-
 ippisite/ippidb/serializer.py                 |  14 +-
 ippisite/ippidb/views/__init__.py             |   4 +-
 ippisite/ippidb/views/targetcentric.py        |  44 +-
 ippisite/ippisite/authentication.py           |  45 ++
 8 files changed, 929 insertions(+), 21 deletions(-)
 create mode 100644 ippisite/ippidb/management/commands/build_distances.py
 create mode 100644 ippisite/ippidb/migrations/0069_contributor_alter_cavity_options_alter_chain_options_and_more.py
 create mode 100644 ippisite/ippisite/authentication.py

diff --git a/docker-compose.yaml b/docker-compose.yaml
index 47f7a43c..efaaf0e8 100644
--- a/docker-compose.yaml
+++ b/docker-compose.yaml
@@ -22,6 +22,8 @@ services:
 #    command: python manage.py runserver 0.0.0.0:8000 # only if you have issues with gunicorn
     volumes:
       - ./ippisite:/code # for dev purpose only !!!
+      #- mediafiles:/code/persistent/media
+      - staticfiles:/static_root
     ports:
       - "8000:8000"
     depends_on:
@@ -43,11 +45,25 @@ services:
       - DJANGO_DEBUG=False
     volumes:
       - ./ippisite:/code # for dev purpose only !!!
+      #- mediafiles:/code/persistent/media
     depends_on:
       - db-local
       - redis-local
       - django # in order to have the migration applied in the db
 
+  nginx:
+    image: nginx:1.19-alpine
+    volumes:
+      - ./nginx/nginx.conf:/etc/nginx/conf.d/default.conf:ro
+      - staticfiles:/static
+      - ./ippisite/persistent/media:/media
+    ports:
+      - "8080:8080"
+    #  - "8090:8090"
+    depends_on:
+      - django
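+    # A minimal ./nginx/nginx.conf matching these mounts could look like the
+    # sketch below (illustrative only; the real file ships outside this patch):
+    #   server {
+    #     listen 8080;
+    #     location /static/ { alias /static/; }
+    #     location /media/  { alias /media/; }
+    #     location / { proxy_pass http://django:8000; }
+    #   }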
 
 volumes:
   ippidb-dev-db-data:
+  #mediafiles:
+  staticfiles:
diff --git a/ippisite/ippidb/management/commands/build_distances.py b/ippisite/ippidb/management/commands/build_distances.py
new file mode 100644
index 00000000..c16b8a4b
--- /dev/null
+++ b/ippisite/ippidb/management/commands/build_distances.py
@@ -0,0 +1,532 @@
+"""
+Command to build the cavity distance matrix
+"""
+
+import os
+import sys
+import json
+from xml.etree.ElementTree import ParseError
+import datetime
+import numpy as np
+import pandas as pd
+from celery import states
+from django.core.management.base import AppCommand
+from django.db.utils import IntegrityError
+# from django.db import transaction
+from django.db.models import Q
+from sklearn.neighbors import NearestNeighbors
+from django.db.models.aggregates import Avg, StdDev, Max, Min
+from ippidb.models.targetcentric import (
+    Cavity,
+    Partner,
+    Chain,
+    PDB,
+    Ligand,
+    Distance,
+    MetaInformation,
+)
+from ippidb.models.ippidb import Protein
+from ippidb.ws import (
+    # convert_iupac_to_smiles,
+    convert_pdb_ligand_id_to_iupacandsmiles,
+    EntryNotFoundError,
+)
+from ippidb.management.commands import TaskOutWrapper
+
+
+def normalize_data(indata):
+    std = indata.std(axis=0)
+    std[std == 0] = 1.0
+    mu = indata.mean(axis=0)
+    return (indata - mu) / std
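+
+# Example (illustrative): normalize_data(np.array([[1., 2.], [3., 2.]]))
+# returns array([[-1., 0.], [1., 0.]]); the zero-variance second column is
+# centred but left unscaled thanks to the std[std == 0] = 1.0 guard.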
+
+
+def load_data(ppc):
+    """
+    Apply variance-stabilizing transforms (log10 / arctanh) to the cavity
+    descriptor DataFrame, drop rows containing inf or nan values and
+    normalize the rest.
+
+    Returns (data, names, cavitynames_error).
+    """
+    colnames = set(ppc.columns) - set(["Cavity"])
+    sel_log = set(["PMI1", "PMI2", "PMI3", "Rgyr", "Volume"])
+    sel_atanh = set(
+        [
+            "NPR1",
+            "NPR2",
+            "Asphericity",
+            "Eccentricity",
+            "InertialShapeFactor",
+            "SpherocityIndex",
+        ]
+    )
+    sel_atanh_100 = colnames - sel_log - sel_atanh
+    ppc[list(sel_log)] = np.log10(ppc[list(sel_log)])
+    ppc[list(sel_atanh)] = np.arctanh(ppc[list(sel_atanh)])
+    ppc[list(sel_atanh_100)] = np.arctanh(ppc[list(sel_atanh_100)] / 100.0)
+
+    names = ppc.Cavity
+    data = ppc[list(colnames)].values
+    # indices of rows containing inf or nan after the transforms
+    error_indices = []
+    for cav in range(len(data)):
+        if np.isinf(data[cav]).any() or np.isnan(data[cav]).any():
+            error_indices.append(cav)
+    # Clean error entry data: drop the bad rows, keep their names for reporting
+    new_data = [i for j, i in enumerate(data) if j not in error_indices]
+    new_names = [i for j, i in enumerate(names) if j not in error_indices]
+    cavitynames_error = [names[ind] for ind in error_indices]
+
+    data = np.array(new_data)
+    names = new_names
+    data = normalize_data(data)
+    return data, names, cavitynames_error
+
+
+def normalize_cavity_table():
+    """
+    Build the descriptor matrix from the Cavity table and feed it to
+    load_data(); the caller expects (data, names, cavitynames_error).
+    Note: load_data() expects the CSV column layout (a "Cavity" name column
+    plus the descriptor columns), so the DataFrame columns must be named
+    accordingly.
+    """
+    cavities = Cavity.objects.all().values_list()
+    df = pd.DataFrame(list(cavities))
+    return load_data(df)
+
+
+def knn(data, nb_neighbors):
+    nbrs = NearestNeighbors(n_neighbors=nb_neighbors, algorithm="ball_tree").fit(data)
+    distances, indices = nbrs.kneighbors(data)
+    return distances, indices
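+
+# Note: kneighbors() is queried with the same data the model was fitted on,
+# so each row's first neighbor is the row itself at distance 0; distances
+# and indices both have shape (n_samples, nb_neighbors).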
+
+
+def dir_path(string):
+    """
+    Test if a path is a directory
+    """
+    if os.path.isdir(string):
+        return string
+    else:
+        raise NotADirectoryError(string)
+
+
+def file_exist(string):
+    """
+    Test if a file exists
+    """
+    if os.path.exists(string):
+        return string
+    else:
+        raise FileNotFoundError(string)
+
+
+class Command(AppCommand):
+    """
+    Command to import cavity descriptors;
+    TaskOutWrapper is used to relay stdout and stderr to the Celery task state
+    """
+
+    help = """Load cavity descriptors from a global CSV file and the matching directory tree.
+    Before importing cavity descriptors, clean the old data with: python manage.py clean_targetcentric"""
+
+    def __init__(
+        self, stdout=None, stderr=None, no_color=False, force_color=False, task=None,
+    ):
+        super(Command, self).__init__(
+            stdout=stdout, stderr=stderr, no_color=no_color, force_color=force_color
+        )
+        if task:
+            task.update_state(state=states.STARTED)
+            self.stdout = TaskOutWrapper(stdout or sys.stdout, task=task, std_out=True)
+            self.stderr = TaskOutWrapper(stderr or sys.stderr, task=task, std_err=True)
+
+    def add_arguments(self, parser):
+        parser.add_argument(
+            "-n",
+            "--nb_neighbors",
+            type=int,
+            help="Number of nearest neighbors to use for saved distances between cavities",
+        )
+        # handle() also reads these options; dir_path/file_exist validate them
+        parser.add_argument("--csv", type=file_exist, help="Global CSV file of cavity descriptors")
+        parser.add_argument("--path", type=dir_path, help="Root directory of the per-PDB results tree")
+        parser.add_argument("--ligand_uniprot", type=file_exist, help="JSON file mapping PDB chains to UniProt accessions")
+
+    def get_iupac_from_pdb(self, content_file, code_ligand):
+        """
+        Parse HETNAM records from a PDB file using this record format:
+        COLUMNS       DATA  TYPE    FIELD           DEFINITION
+        ----------------------------------------------------------------------------
+        1 -  6       Record name   "HETNAM"
+        9 - 10       Continuation  continuation    Allows concatenation of multiple records.
+        12 - 14       LString(3)    hetID           Het identifier, right-justified.
+        16 - 70       String        text            Chemical name.
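+
+        Example record: "HETNAM     NAG N-ACETYL-D-GLUCOSAMINE"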
+        """
+        lines = content_file.split("\n")
+        iupac = []
+        for line in lines:
+            if line[0:6] == "HETNAM" and line[11:14] == code_ligand:
+                index = line[8:10].strip()
+                if index:
+                    iupac.insert(int(index) - 1, line[15:70].strip())
+                else:
+                    iupac.append(line[15:70].strip())
+        return "".join(iupac)
+
+    def init_cavity(self, verbose_name, uniprot_lig, dir_pdbs):
+        """
+        Initialize a Cavity object from its verbose_name before saving all
+        the descriptors. A verbose_name can be '3spf-A-B50-501_CAVITY_N1'
+        or '1bxl-AB-Q07817-Q16611-withinA_CAVITY_N1'
+        """
+
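+        # e.g. "3spf-A-B50-501_CAVITY_N1" splits into pdb_code "3spf",
+        # chains "A", ligand code "B50", supplementary id "501" and cavity
+        # number "1"; "1bxl-AB-Q07817-Q16611-withinA_CAVITY_N1" is the
+        # heterodimer case, with one UniProt code per chain and the cavity
+        # located within chain "A"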
+        splitted = verbose_name.split("-")
+        pdb_code = splitted[0]
+        chains = splitted[1]
+        cavity_number = splitted[-1].split("_")[-1].strip("N")
+        results_path = "{}/{}/results".format(dir_pdbs, pdb_code)
+        if len(splitted) == 4:
+            # Protein Ligand
+            self.stdout.write(
+                self.style.WARNING(
+                    "Start importing protein-ligand cavity {}".format(verbose_name)
+                )
+            )
+            code_partner = splitted[2]
+            chain_uniprot = []
+            for unipdb in uniprot_lig[pdb_code]:
+                if unipdb["chain"] == chains:
+                    chain_uniprot.append(unipdb["uniprot"])
+            if len(set(chain_uniprot)) > 1:
+                self.stdout.write(
+                    self.style.WARNING(
+                        "WARNING more than one uniprot code for pdb {}, chain {}".format(
+                            pdb_code, chains
+                        )
+                    )
+                )
+            chain_uniprot = chain_uniprot[0]
+            chain_pdb_id = chains
+            partner_pdb_id = splitted[-1].split("_")[0]
+            base_name_file = "-".join(splitted[0:2])
+            pdb_path = "{}/{}-{}.pdb".format(
+                results_path, base_name_file, chain_uniprot
+            )
+            pdb_path_chain = "{}/{}-{}.pdb".format(
+                results_path, base_name_file, chain_uniprot
+            )
+            pdb_path_partner = "{}/{}/{}-{}-{}.pdb".format(
+                dir_pdbs, pdb_code, base_name_file, code_partner, partner_pdb_id
+            )
+            with open(pdb_path, "r") as pdbfile:
+                file_content = pdbfile.read()
+            self.stdout.write(self.style.WARNING("PDB creation {}".format(pdb_code)))
+            pdb, created = PDB.objects.get_or_create(code=pdb_code,)
+            pdb.file_content = file_content
+            pdb.save()
+            # iupac = self.get_iupac_from_pdb(file_content, code_partner)
+            # try:
+            #    smiles = convert_iupac_to_smiles(iupac)
+            # except EntryNotFoundError:
+            #    smiles = ""
+            iupac, smiles = convert_pdb_ligand_id_to_iupacandsmiles(code_partner)
+            with open(pdb_path_partner, "r") as compoundfile:
+                file_content_compound = compoundfile.read()
+            self.stdout.write(
+                self.style.WARNING(
+                    "Ligand partner creation ligand: {} {}, pdb: {}, smiles: {}".format(
+                        code_partner, partner_pdb_id, pdb.code, smiles
+                    )
+                )
+            )
+            partner_ligand, created = Ligand.objects.get_or_create(
+                supplementary_id=partner_pdb_id, pdb_ligand_id=code_partner, pdb=pdb,
+            )
+            partner_ligand.iupac_name = iupac
+            partner_ligand.canonical_smile = smiles
+            partner_ligand.file_content = file_content_compound
+            partner_ligand.save()
+            self.stdout.write(
+                self.style.WARNING(
+                    "Partner creation, ligand: {}".format(partner_ligand.pdb_ligand_id)
+                )
+            )
+            partner, created = Partner.objects.get_or_create(ligand=partner_ligand)
+        elif len(splitted) == 5:
+            # Heterodimer
+            self.stdout.write(
+                self.style.WARNING(
+                    "Start importing heterodimer cavity {}".format(verbose_name)
+                )
+            )
+        # take the chain letter after the "within" prefix; str.strip("within")
+        # would also eat chain ids made of those letters (e.g. N, W, T, H, I)
+        within = splitted[-1].split("_")[0][len("within"):]
+            chain_index = chains.index(within)
+            chain_uniprot = splitted[chain_index + 2]
+            chain_pdb_id = chains[chain_index]
+            if chain_index == 0:
+                code_index = 1
+            else:
+                code_index = 0
+            code_partner = splitted[code_index + 2]
+            partner_pdb_id = chains[code_index]
+            base_name_file = "-".join(splitted[0:4])
+            pdb_path = "{}/{}.pdb".format(results_path, base_name_file)
+            pdb_path_chain = "{}/{}-{}.pdb".format(
+                results_path, base_name_file, chain_pdb_id
+            )
+            pdb_path_partner = "{}/{}-{}.pdb".format(
+                results_path, base_name_file, partner_pdb_id
+            )
+            with open(pdb_path, "r") as pdbfile:
+                file_content = pdbfile.read()
+            self.stdout.write(self.style.WARNING("PDB creation {}".format(pdb_code)))
+            pdb, created = PDB.objects.get_or_create(code=pdb_code,)
+            pdb.file_content = file_content
+            pdb.save()
+            try:
+                self.stdout.write(
+                    self.style.WARNING(
+                        "Protein partner creation {}".format(code_partner)
+                    )
+                )
+                partner_protein, created = Protein.objects.get_or_create(
+                    uniprot_id=code_partner
+                )
+            except (
+                IndexError,
+                TypeError,
+                IntegrityError,
+                EntryNotFoundError,
+                ParseError,
+                json.decoder.JSONDecodeError,
+            ) as err:
+                self.stdout.write(
+                    self.style.WARNING(
+                        "WARNING: could not create Protein {}, error {}".format(
+                            chain_uniprot, err
+                        )
+                    )
+                )
+                return None
+            with open(pdb_path_partner, "r") as partnerfile:
+                file_content_partner = partnerfile.read()
+            self.stdout.write(
+                self.style.WARNING(
+                    "Chain partner creation, pdb: {}, chain: {}, uniprot: {}".format(
+                        pdb.code, partner_pdb_id, partner_protein.uniprot_id
+                    )
+                )
+            )
+            partner_chain, created = Chain.objects.get_or_create(
+                pdb_chain_id=partner_pdb_id, pdb=pdb,
+            )
+            partner_chain.protein = partner_protein
+            partner_chain.file_content = file_content_partner
+            partner_chain.save()
+            self.stdout.write(
+                self.style.WARNING(
+                    "Partner creation, chain: {}".format(partner_chain.pdb_chain_id)
+                )
+            )
+            partner, created = Partner.objects.get_or_create(chain=partner_chain)
+        else:
+            raise NameError("Cavity full name does not conform to the expected pattern")
+        try:
+            self.stdout.write(
+                self.style.WARNING("Protein creation {}".format(chain_uniprot))
+            )
+            protein, created = Protein.objects.get_or_create(uniprot_id=chain_uniprot)
+        except (
+            IndexError,
+            TypeError,
+            IntegrityError,
+            EntryNotFoundError,
+            ParseError,
+            json.decoder.JSONDecodeError,
+        ) as err:
+            self.stdout.write(
+                self.style.WARNING(
+                    "WARNING: could not create Protein {}, error {}".format(
+                        chain_uniprot, err
+                    )
+                )
+            )
+            return None
+        with open(pdb_path_chain, "r") as f:
+            file_content_chain = f.read()
+        self.stdout.write(
+            self.style.WARNING(
+                "Chain creation, pdb: {}, chain: {}, uniprot: {}".format(
+                    pdb.code, chain_pdb_id, protein.uniprot_id
+                )
+            )
+        )
+        chain, created = Chain.objects.get_or_create(
+            pdb_chain_id=chain_pdb_id, pdb=pdb,
+        )
+        chain.protein = protein
+        chain.file_content = file_content_chain
+        chain.save()
+        mol2_path = "{}/{}-{}/CAVITY_N{}_ALL_orthosteric.mol2".format(
+            results_path,
+            "-".join(splitted[0:-1]),
+            splitted[-1].split("_")[0],
+            cavity_number,
+        )
+        with open(mol2_path, "r") as mol2file:
+            mol2_content = mol2file.read()
+        try:
+            cavity = Cavity.objects.get(
+                full_name=verbose_name,
+                chain=chain,
+                cavity_number=cavity_number,
+                partner=partner,
+            )
+            self.stdout.write(
+                self.style.WARNING(
+                    "WARNING Cavity {} already exists".format(verbose_name)
+                )
+            )
+            return None
+        except Cavity.DoesNotExist:
+            self.stdout.write(
+                self.style.SUCCESS(
+                    "Info Cavity {} does not exist yet".format(verbose_name)
+                )
+            )
+            cavity = Cavity(
+                full_name=verbose_name,
+                mol2=mol2_content,
+                chain=chain,
+                cavity_number=cavity_number,
+                partner=partner,
+            )
+            return cavity
+
+    def handle(self, *args, **options):
+        """
+        Perform the command's actions
+        """
+        # hard-coded switch: set to True to skip the import and distance
+        # computation steps and only refresh the MetaInformation aggregates
+        update_aggregate_only = False
+        if not update_aggregate_only:
+            csv = pd.read_csv(options["csv"], sep=",")
+            dirpath = options["path"]
+            uniprot_json_file = options["ligand_uniprot"]
+            with open("{}".format(uniprot_json_file), "r") as jsonfile:
+                data = json.load(jsonfile)
+            # with transaction.atomic():
+            for index, row in csv.iterrows():
+                try:
+                    new_cavity = self.init_cavity(row["Cavity"], data, dirpath)
+                except FileNotFoundError as err:
+                    self.stdout.write(
+                        self.style.WARNING("File Not Found: {}".format(err))
+                    )
+                    new_cavity = None
+                if new_cavity:
+                    for name, attr in row.items():
+                        if name != "Cavity":
+                            setattr(
+                                new_cavity, name.lower().replace("-", "_"), attr
+                            )
+                    new_cavity.save()
+                    self.stdout.write(
+                        self.style.SUCCESS(
+                            "Cavity {} successfully imported".format(
+                                row["Cavity"]
+                            )
+                        )
+                    )
+        if not update_aggregate_only:
+            self.stdout.write(self.style.SUCCESS("Matrix preparation"))
+            data, names, cavities_error = normalize_cavity_table()
+            for cavity_error in cavities_error:
+                self.stdout.write(
+                    self.style.WARNING(
+                        """WARNING Cavity {}: an error occurred while normalizing the matrix.
+                            This cavity was not taken into account when normalizing the matrix.
+                            Please check the cavity before recalculating the distance matrix.
+                            You can use the clean_targetcentric command to clean up the database
+                            if you delete the cavity, or the import_targetcentric command
+                            to update the matrix if you correct the cavity""".format(
+                            cavity_error
+                        )
+                    )
+                )
+            distances, indices = knn(data, options["nb_neighbors"])
+            for cav1_index, name in enumerate(names):
+                bulk_distance_create = []
+                bulk_distance_update = []
+                try:
+                    cavity1 = Cavity.objects.get(full_name=name)
+                except Cavity.DoesNotExist:
+                    cavity1 = None
+                if cavity1:
+                    self.stdout.write(
+                        self.style.SUCCESS(
+                            "{}: Distances for cavity {}".format(
+                                datetime.datetime.now(), name
+                            )
+                        )
+                    )
+                    for dist_index, cav2_index in enumerate(indices[cav1_index]):
+                        try:
+                            cavity2 = Cavity.objects.get(
+                                full_name=names[cav2_index]
+                            )
+                        except Cavity.DoesNotExist:
+                            cavity2 = None
+                        if cavity2 and cavity1 != cavity2:
+                            if distances[cav1_index][dist_index] == 0:
+                                self.stdout.write(
+                                    self.style.WARNING(
+                                        "WARNING Distance between {} and {} is zero".format(
+                                            cavity1.full_name, cavity2.full_name
+                                        )
+                                    )
+                                )
+                            try:
+                                dist = Distance.objects.get(
+                                    Q(cavity1=cavity1, cavity2=cavity2)
+                                    | Q(cavity1=cavity2, cavity2=cavity1)
+                                )
+                                if (
+                                    dist.distance
+                                    != distances[cav1_index][dist_index]
+                                ):
+                                    dist.distance = distances[cav1_index][
+                                        dist_index
+                                    ]
+                                    bulk_distance_update.append(dist)
+                            except Distance.DoesNotExist:
+                                dist = Distance(
+                                    cavity1=cavity1,
+                                    cavity2=cavity2,
+                                    distance=distances[cav1_index][dist_index],
+                                )
+                                bulk_distance_create.append(dist)
+                            except Distance.MultipleObjectsReturned as err:
+                                self.stdout.write(
+                                    self.style.ERROR(
+                                        "ERROR: Multiple distances for {}, {}: {}".format(
+                                            name, names[cav2_index], err
+                                        )
+                                    )
+                                )
+                    Distance.objects.bulk_create(bulk_distance_create)
+                    Distance.objects.bulk_update(bulk_distance_update, ["distance"])
+        average_std = Distance.objects.aggregate(
+            average=Avg("distance"),
+            std=StdDev("distance"),
+            maximum=Max("distance"),
+            minimum=Min("distance"),
+        )
+        metainfo = MetaInformation.objects.all()
+        if metainfo.count() > 1:
+            # several rows is an inconsistent state: drop them all and
+            # recreate a single row below
+            metainfo.delete()
+        if not MetaInformation.objects.exists():
+            MetaInformation.objects.create(
+                average=average_std["average"],
+                std=average_std["std"],
+                maximum=average_std["maximum"],
+                minimum=average_std["minimum"],
+            )
+        else:
+            metainfofirst = metainfo.first()
+            metainfofirst.average = average_std["average"]
+            metainfofirst.std = average_std["std"]
+            metainfofirst.maximum = average_std["maximum"]
+            metainfofirst.minimum = average_std["minimum"]
+            metainfofirst.save()
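+
+
+# Usage sketch (option names as declared in add_arguments; file names are
+# illustrative):
+#   python manage.py build_distances --csv descriptors.csv --path /data/pdbs \
+#       --ligand_uniprot uniprot_ligand.json -n 50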
diff --git a/ippisite/ippidb/migrations/0069_contributor_alter_cavity_options_alter_chain_options_and_more.py b/ippisite/ippidb/migrations/0069_contributor_alter_cavity_options_alter_chain_options_and_more.py
new file mode 100644
index 00000000..419cf79d
--- /dev/null
+++ b/ippisite/ippidb/migrations/0069_contributor_alter_cavity_options_alter_chain_options_and_more.py
@@ -0,0 +1,269 @@
+# Generated by Django 4.2.5 on 2023-09-20 23:30
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+    dependencies = [
+        ('contenttypes', '0002_remove_content_type_name'),
+        ('auth', '0012_alter_user_first_name_max_length'),
+        ('ippidb', '0068_auto_20210310_1052'),
+    ]
+
+    operations = [
+        migrations.CreateModel(
+            name='Contributor',
+            fields=[
+            ],
+            options={
+                'proxy': True,
+                'indexes': [],
+                'constraints': [],
+            },
+            bases=('auth.user',),
+        ),
+        migrations.AlterModelOptions(
+            name='cavity',
+            options={'ordering': ['chain', 'cavity_number'], 'verbose_name_plural': 'Cavities'},
+        ),
+        migrations.AlterModelOptions(
+            name='chain',
+            options={'ordering': ['pdb', 'pdb_chain_id']},
+        ),
+        migrations.AddField(
+            model_name='partner',
+            name='type_binding',
+            field=models.CharField(default='orthosteric', max_length=50, verbose_name='Type of Binding'),
+        ),
+        migrations.AlterField(
+            model_name='bibliography',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='cavity',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='cellline',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='chain',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compound',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundaction',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundactivityresult',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundcytotoxicityresult',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundjob',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundpkresult',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='compoundpkresult',
+            name='tolerated',
+            field=models.BooleanField(null=True, verbose_name='Tolerated'),
+        ),
+        migrations.AlterField(
+            model_name='compoundtanimoto',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='contribution',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='disease',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='distance',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='domain',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='drugbankcompoundtanimoto',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='interactfile',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='job',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='lellebiplotdata',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='ligand',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='average',
+            field=models.DecimalField(decimal_places=8, help_text='partial matrix mean value', max_digits=11, verbose_name='Average'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='maximum',
+            field=models.DecimalField(decimal_places=8, help_text='partial matrix maximum value', max_digits=11, verbose_name='Maximum'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='minimum',
+            field=models.DecimalField(decimal_places=8, help_text='partial matrix minimum value', max_digits=11, verbose_name='Minimum'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='normalize_factor',
+            field=models.DecimalField(blank=True, decimal_places=8, default=4.5859973, help_text='complet matrix standard deviation', max_digits=11, null=True, verbose_name='Normalize Factor'),
+        ),
+        migrations.AlterField(
+            model_name='metainformation',
+            name='std',
+            field=models.DecimalField(decimal_places=8, help_text='partial matrix standard deviation', max_digits=11, verbose_name='Standard Deviation'),
+        ),
+        migrations.AlterField(
+            model_name='molecularfunction',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='network',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='partner',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='pcabiplotdata',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='pdb',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='ppi',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='ppicomplex',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='ppifamily',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='protein',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='proteindomaincomplex',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='proteindomaincomplex',
+            name='polymorphic_ctype',
+            field=models.ForeignKey(editable=False, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='polymorphic_%(app_label)s.%(class)s_set+', to='contenttypes.contenttype'),
+        ),
+        migrations.AlterField(
+            model_name='proteindomaincomplexgroup',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='refcompoundbiblio',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='symmetry',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='taxonomy',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='testactivitydescription',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='testcytotoxdescription',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterField(
+            model_name='testpkdescription',
+            name='id',
+            field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'),
+        ),
+        migrations.AlterUniqueTogether(
+            name='partner',
+            unique_together={('ligand', 'chain')},
+        ),
+    ]
diff --git a/ippisite/ippidb/models/targetcentric.py b/ippisite/ippidb/models/targetcentric.py
index ccbf3660..29e42050 100644
--- a/ippisite/ippidb/models/targetcentric.py
+++ b/ippisite/ippidb/models/targetcentric.py
@@ -2,8 +2,9 @@ from django.db import models
 from django.forms import ValidationError
 from django.db.models import Q
 from django.dispatch import receiver
-from django.db.models.signals import pre_delete, post_save
+from django.db.models.signals import pre_delete, post_save, pre_save
 from .ippidb import Protein
+from ippidb.ws import convert_pdb_ligand_id_to_iupacandsmiles
 import json
 
 
@@ -165,7 +166,6 @@ class Chain(models.Model):
             partner_message = "with partner "
         return "{}{}".format(partner_message, self.protein.recommended_name_long)
 
-
 class InteractFile(models.Model):
     chain = models.ForeignKey(Chain, on_delete=models.CASCADE)
     label = models.CharField(max_length=250, blank=False, null=False)
@@ -174,6 +174,9 @@ class InteractFile(models.Model):
         default=0.7, verbose_name="Default isolevel value"
     )
 
+    class Meta:
+        unique_together = [['chain', 'label']]
+
     def __unicode__(self):
         return "{}".format(self.interact_file)
 
@@ -223,6 +226,14 @@ class Ligand(models.Model):
         partner_message = "with ligand "
         return "%s%s_%s" % (partner_message, self.pdb_ligand_id, self.supplementary_id)
 
+    def save(self, *args, **kwargs):
+        # fill in a missing IUPAC name / SMILES from the PDB ligand id before
+        # writing, so the row is saved once with complete data and save()
+        # cannot re-enter itself when the web service returns empty values
+        if not self.canonical_smile or not self.iupac_name:
+            iupac, smiles = convert_pdb_ligand_id_to_iupacandsmiles(self.pdb_ligand_id)
+            self.canonical_smile = smiles
+            self.iupac_name = iupac
+        super(Ligand, self).save(*args, **kwargs)
+
 
 class Partner(models.Model):
     """
@@ -231,6 +242,12 @@ class Partner(models.Model):
 
     ligand = models.ForeignKey(Ligand, on_delete=models.CASCADE, blank=True, null=True)
     chain = models.ForeignKey(Chain, on_delete=models.CASCADE, blank=True, null=True)
+    type_binding = models.CharField(
+        "Type of Binding", max_length=50, default="orthosteric", blank=False, null=False
+    )
+
+    class Meta:
+        unique_together = [['ligand', 'chain']]
+
 
     def save(self, *args, **kwargs):
         if self.ligand and self.chain:
@@ -509,3 +526,8 @@ def interactdelete(sender, instance, **kwargs):
                 interactfile.interact_file.name
             ):
                 interactfile.interact_file.delete()
+
+
+@receiver(pre_save, sender=Chain)
+def updatemrcfile(sender, instance, **kwargs):
+    # placeholder hook for refreshing the chain's MRC file on save; kept as a
+    # no-op so saving a Chain without an attached file cannot fail here
+    pass
diff --git a/ippisite/ippidb/serializer.py b/ippisite/ippidb/serializer.py
index aca1fa6b..1df73491 100644
--- a/ippisite/ippidb/serializer.py
+++ b/ippisite/ippidb/serializer.py
@@ -114,7 +114,7 @@ class CavityDistanceSerializer(serializers.ModelSerializer):
 
     class Meta:
         model = Cavity
-        exclude = ("mol2",)
+        exclude = ('mol2',)
 
 
 class DistanceSerializer(serializers.ModelSerializer):
@@ -130,17 +130,19 @@ class CavitySerializer(serializers.ModelSerializer):
     class Meta:
         model = Cavity
         fields = "__all__"
+        read_only_fields = ("t120",)
+        # DRF removed the write_only_fields Meta option; extra_kwargs is the
+        # supported way to mark mol2 write-only
+        extra_kwargs = {"mol2": {"write_only": True}}
 
 
-class PartnerSerializer(serializers.ModelSerializer):
+class InteractFileSerializer(serializers.ModelSerializer):
     class Meta:
-        model = Partner
+        model = InteractFile
         fields = "__all__"
 
 
-class InteractFileSerializer(serializers.ModelSerializer):
+class PartnerSerializer(serializers.ModelSerializer):
     class Meta:
-        model = InteractFile
+        model = Partner
         fields = "__all__"
 
 
@@ -169,9 +171,11 @@ class ChainSerializer(serializers.ModelSerializer):
 class LigandSerializer(serializers.ModelSerializer):
     partner_set = PartnerSerializer(many=True, read_only=True)
 
     class Meta:
         model = Ligand
         fields = "__all__"
+        read_only_fields = ['canonical_smile', 'iupac_name']
 
 
 class PdbSerializer(serializers.ModelSerializer):
diff --git a/ippisite/ippidb/views/__init__.py b/ippisite/ippidb/views/__init__.py
index 81683b9a..a83fe149 100644
--- a/ippisite/ippidb/views/__init__.py
+++ b/ippisite/ippidb/views/__init__.py
@@ -23,7 +23,8 @@ from .targetcentric import (
     PartnerViewSet,
     InteractFileViewSet,
     CavityViewSet,
-    BuildDistancesViewSet)
+    BuildDistancesViewSet,
+    pocketome_html,)
 from .contribute import ippidb_wizard_view, ContributionDetailView
 from .compound_query import (
     CompoundListView,
@@ -134,4 +135,5 @@ __all__ = [
     get_json_network,
     IppiDbCompoundSitemap,
     IppiDbContributorsSitemap,
+    pocketome_html,
 ]
diff --git a/ippisite/ippidb/views/targetcentric.py b/ippisite/ippidb/views/targetcentric.py
index 065daced..f305a06d 100644
--- a/ippisite/ippidb/views/targetcentric.py
+++ b/ippisite/ippidb/views/targetcentric.py
@@ -36,6 +36,13 @@ from rest_framework.permissions import IsAdminUser, SAFE_METHODS
 from rest_framework.authtoken.models import Token
 from ippidb.tasks import launch_build_matrix_distance
 from rest_framework.response import Response
+from django.views.decorators.clickjacking import xframe_options_exempt
+from django.shortcuts import render
+
+
+@xframe_options_exempt
+def pocketome_html(request):
+    return render(request, "PocketBook_MST_Euclidean_log_trim_scratch_sec61.html")
 
 
 class IsAdminUserOrReadOnly(IsAdminUser):
@@ -54,13 +61,12 @@ class PdbViewSet(viewsets.ModelViewSet):
 
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
+        queryset = PDB.objects.all().values("code", "id")
         code = self.request.GET.get("code")
         if "pk" in self.kwargs:
             queryset = PDB.objects.filter(pk=self.kwargs["pk"])
         elif code:
             queryset = PDB.objects.filter(code=code)
-        else:
-            queryset = PDB.objects.all().values("code", "id")
         return queryset
  
 
@@ -70,11 +76,10 @@ class ProteinViewSet(viewsets.ModelViewSet):
 
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
+        queryset = Protein.objects.all()
         uniprot_id = self.request.GET.get("uniprot_id")
         if uniprot_id:
-            queryset = Protein.objects.filter(uniprot_id=uniprot_id)
-        else:
-            queryset = Protein.objects.all()
+            queryset = queryset.filter(uniprot_id=uniprot_id)
         return queryset
 
 
@@ -84,15 +89,14 @@ class ChainViewSet(viewsets.ModelViewSet):
 
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
+        queryset = Chain.objects.all()
         pdb = self.request.GET.get("pdb_id")
         pdb_chain_id = self.request.GET.get("pdb_chain_id")
         if pdb:
             if pdb_chain_id:
-                queryset = Chain.objects.filter(pdb=pdb, pdb_chain_id=pdb_chain_id)
+                queryset = queryset.filter(pdb=pdb, pdb_chain_id=pdb_chain_id)
             else:
-                queryset = Chain.objects.filter(pdb=pdb)
-        else:
-            queryset = Chain.objects.all()
+                queryset = queryset.filter(pdb=pdb)
         return queryset
 
 
@@ -108,13 +112,11 @@ class LigandViewSet(viewsets.ModelViewSet):
         supplementary_id = self.request.GET.get("supplementary_id")
         if pdb:
             if pdb_ligand_id and supplementary_id:
-                queryset = Ligand.objects.filter(pdb=pdb, 
+                queryset = queryset.filter(pdb=pdb, 
                                                  pdb_ligand_id=pdb_ligand_id, 
                                                  supplementary_id=supplementary_id)
             else:
-                queryset = Ligand.objects.filter(pdb=pdb)
-        else:
-            queryset = Ligand.objects.all()
+                queryset = queryset.filter(pdb=pdb)
         return queryset
 
 
@@ -125,6 +127,12 @@ class PartnerViewSet(viewsets.ModelViewSet):
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
         queryset = Partner.objects.all()
+        chain = self.request.GET.get("chain")
+        ligand = self.request.GET.get("ligand")
+        if chain:
+            queryset = queryset.filter(chain=chain)
+        if ligand:
+            queryset = queryset.filter(ligand=ligand)
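+        # e.g. GET ?chain=<chain pk>&ligand=<ligand pk>; the two filters can
+        # be used independently or combined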
         return queryset
 
 
@@ -135,6 +143,13 @@ class InteractFileViewSet(viewsets.ModelViewSet):
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
         queryset = InteractFile.objects.all()
+        chain = self.request.GET.get("chain")
+        label = self.request.GET.get("label")
+        if chain:
+            if label:
+                queryset = queryset.filter(chain=chain, label=label)
+            else:
+                queryset = queryset.filter(chain=chain)
         return queryset
 
 
@@ -145,6 +160,9 @@ class CavityViewSet(viewsets.ModelViewSet):
     @action(detail=True, methods=["get", "post"])
     def get_queryset(self):
         queryset = Cavity.objects.all()
+        full_name = self.request.GET.get("full_name")
+        if full_name:
+            queryset = queryset.filter(full_name=full_name)
         return queryset
 
 
diff --git a/ippisite/ippisite/authentication.py b/ippisite/ippisite/authentication.py
new file mode 100644
index 00000000..e49585d8
--- /dev/null
+++ b/ippisite/ippisite/authentication.py
@@ -0,0 +1,45 @@
+from rest_framework.authtoken.models import Token
+from rest_framework.authentication import TokenAuthentication
+from rest_framework.exceptions import AuthenticationFailed
+from datetime import timedelta
+from django.utils import timezone
+from django.conf import settings
+
+
+def expires_in(token):
+    # return the time remaining before the token expires
+    time_elapsed = timezone.now() - token.created
+    left_time = timedelta(seconds=settings.TOKEN_EXPIRED_AFTER_SECONDS) - time_elapsed
+    return left_time
+
+
+# check whether the token has expired
+def is_token_expired(token):
+    return expires_in(token) < timedelta(seconds=0)
+
+
+def token_expire_handler(token):
+    is_expired = is_token_expired(token)
+    if is_expired:
+        token.delete()
+        token = None
+    return is_expired, token
+
+
+class ExpiringTokenAuthentication(TokenAuthentication):
+    """
+    Token authentication with expiry: an expired token is deleted and the
+    user must generate a new one from the admin interface.
+    """
+
+    def authenticate_credentials(self, key):
+        try:
+            token = Token.objects.get(key=key)
+        except Token.DoesNotExist:
+            raise AuthenticationFailed("Invalid Token")
+
+        if not token.user.is_active:
+            raise AuthenticationFailed("User is not active")
+
+        is_expired, token = token_expire_handler(token)
+        if is_expired:
+            raise AuthenticationFailed(
+                "Token has expired; go back to the admin interface to generate a new one"
+            )
+
+        return (token.user, token)
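+
+
+# Wiring sketch (assumed settings; this patch does not include the settings
+# changes themselves):
+#   TOKEN_EXPIRED_AFTER_SECONDS = 3600
+#   REST_FRAMEWORK = {
+#       "DEFAULT_AUTHENTICATION_CLASSES": [
+#           "ippisite.authentication.ExpiringTokenAuthentication",
+#       ],
+#   }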
-- 
GitLab