Commit cc84631c authored by Rachel TORCHET

Merge branch 'master' of gitlab.pasteur.fr:ippidb/ippidb-web

parents c5fa782c 21457ae3
Pipeline #28818 failed in 9 minutes and 33 seconds
ippisite/ippidb/static/academicons-1.8.6/* linguist-vendored
ippisite/ippidb/static/bootstrap-slider-master/* linguist-vendored
ippisite/ippidb/static/bootstrap/* linguist-vendored
ippisite/ippidb/static/chartjs/* linguist-vendored
ippisite/ippidb/static/fontawesome/* linguist-vendored
ippisite/ippidb/static/fonts/* linguist-vendored
ippisite/ippidb/static/jquery/* linguist-vendored
ippisite/ippidb/static/marvinjs-18/* linguist-vendored
ippisite/ippidb/static/smilesdrawer/* linguist-vendored
ippisite/ippidb/static/typeahead/* linguist-vendored
ippisite/ippidb/static/url-polyfill/* linguist-vendored
\ No newline at end of file
stages:
- test
- deploy
test-style:
image: python:3.6
stage: test
@@ -9,6 +10,7 @@ test-style:
- cd ippisite
- pip install flake8
- flake8 --config=.flake8
test-ansible:
image: python:3.5
stage: test
@@ -18,6 +20,7 @@ test-ansible:
- whoami
- ansible-playbook system.yaml --syntax-check
- ansible-playbook deploy.yaml --syntax-check
test-centos7:
services:
- redis
@@ -56,6 +59,27 @@ test-centos7:
- python3.6 manage.py test
- coverage run --source='.' manage.py test
- coverage report
- coverage html
- pip3.6 install sphinx sphinx-argparse sphinxcontrib.bibtex sphinx_rtd_theme
- cd docs
- make html
artifacts:
paths:
- ippisite/htmlcov
- ippisite/docs/build/html
pages:
stage: deploy
dependencies:
- test-centos7
script:
- 'mkdir -p public/$CI_COMMIT_REF_NAME'
- 'mv ippisite/htmlcov public/$CI_COMMIT_REF_NAME/'
- 'mv ippisite/docs/build/html/ public/$CI_COMMIT_REF_NAME/'
artifacts:
paths:
- public
deploy-webserver-targetcentric:
image: python:3.5
stage: deploy
@@ -78,6 +102,7 @@ deploy-webserver-targetcentric:
--extra-vars "deploy_user_name=ippidb repo_api_token=JZS-4cH7bWkFkHa2rAVf marvinjs_apikey=$MARVINJS_APIKEY_targetcentric galaxy_base_url=$GALAXY_BASE_URL_targetcentric galaxy_apikey=$GALAXY_APIKEY_targetcentric galaxy_compoundproperties_workflowid=$GALAXY_COMPOUNDPROPERTIES_WORKFLOWID_targetcentric secret_key=$SECRET_KEY_targetcentric dbname=$DBNAME_targetcentric dbuser=$DBUSER_targetcentric dbpassword=$DBPASSWORD_targetcentric dbhost=$DBHOST_targetcentric dbport=$DBPORT_targetcentric http_port=$HTTP_PORT_targetcentric branch=targetcentric"
only:
- targetcentric
deploy-webserver-test:
image: python:3.5
stage: deploy
@@ -97,9 +122,10 @@ deploy-webserver-test:
- cd ansible
- whoami
- ansible-playbook -vvv -i ./hosts_master deploy.yaml
--extra-vars "deploy_user_name=ippidb repo_api_token=JZS-4cH7bWkFkHa2rAVf marvinjs_apikey=$MARVINJS_APIKEY_master galaxy_base_url=$GALAXY_BASE_URL_master galaxy_apikey=$GALAXY_APIKEY_master galaxy_compoundproperties_workflowid=$GALAXY_COMPOUNDPROPERTIES_WORKFLOWID_master secret_key=$SECRET_KEY_master dbname=$DBNAME_master dbuser=$DBUSER_master dbpassword=$DBPASSWORD_master dbhost=$DBHOST_master dbport=$DBPORT_master http_port=$HTTP_PORT_master branch=$CI_COMMIT_REF_NAME"
--extra-vars "deploy_user_name=ippidb repo_api_token=JZS-4cH7bWkFkHa2rAVf marvinjs_apikey=$MARVINJS_APIKEY_master galaxy_base_url=$GALAXY_BASE_URL_master galaxy_apikey=$GALAXY_APIKEY_master galaxy_compoundproperties_workflowid=$GALAXY_COMPOUNDPROPERTIES_WORKFLOWID_master secret_key=$SECRET_KEY_master dbname=$DBNAME_master dbuser=$DBUSER_master dbpassword=$DBPASSWORD_master dbhost=$DBHOST_master dbport=$DBPORT_master http_port=$HTTP_PORT_master branch=$CI_COMMIT_REF_NAME gacode=$GACODE_master"
only:
- master
deploy-webserver-production:
image: python:3.5
stage: deploy
@@ -119,6 +145,6 @@ deploy-webserver-production:
- cd ansible
- whoami
- ansible-playbook -vvv -i ./hosts_release deploy.yaml
--extra-vars "deploy_user_name=ippidb repo_api_token=JZS-4cH7bWkFkHa2rAVf marvinjs_apikey=$MARVINJS_APIKEY_release galaxy_base_url=$GALAXY_BASE_URL_release galaxy_apikey=$GALAXY_APIKEY_release galaxy_compoundproperties_workflowid=$GALAXY_COMPOUNDPROPERTIES_WORKFLOWID_release secret_key=$SECRET_KEY_release dbname=$DBNAME_release dbuser=$DBUSER_release dbpassword=$DBPASSWORD_release dbhost=$DBHOST_release dbport=$DBPORT_release http_port=$HTTP_PORT_release branch=$CI_COMMIT_REF_NAME"
--extra-vars "deploy_user_name=ippidb repo_api_token=JZS-4cH7bWkFkHa2rAVf marvinjs_apikey=$MARVINJS_APIKEY_release galaxy_base_url=$GALAXY_BASE_URL_release galaxy_apikey=$GALAXY_APIKEY_release galaxy_compoundproperties_workflowid=$GALAXY_COMPOUNDPROPERTIES_WORKFLOWID_release secret_key=$SECRET_KEY_release dbname=$DBNAME_release dbuser=$DBUSER_release dbpassword=$DBPASSWORD_release dbhost=$DBHOST_release dbport=$DBPORT_release http_port=$HTTP_PORT_release branch=$CI_COMMIT_REF_NAME gacode=$GACODE_release"
only:
- release
[Unit]
-Description=Celery Service
+Description=Celery Service for iPPI-DB running on port {{ http_port }}
After=network.target
[Service]
Type=forking
-User=celery
+User=celery-{{ http_port }}
Group=ippidb
EnvironmentFile=-/etc/default/ippidb-{{ http_port }}-celeryd
WorkingDirectory=/home/ippidb/ippidb-web-{{ http_port }}/ippisite
ExecStart=/bin/sh -c '${CELERY_BIN} multi start ${CELERYD_NODES} \
-Q ${CELERYD_QUEUE} \
-A ${CELERY_APP} --pidfile=${CELERYD_PID_FILE} \
--logfile=${CELERYD_LOG_FILE} --loglevel=${CELERYD_LOG_LEVEL} ${CELERYD_OPTS}'
ExecStop=/bin/sh -c '${CELERY_BIN} multi stopwait ${CELERYD_NODES} \
--pidfile=${CELERYD_PID_FILE}'
ExecReload=/bin/sh -c '${CELERY_BIN} multi restart ${CELERYD_NODES} \
-Q ${CELERYD_QUEUE} \
-A ${CELERY_APP} --pidfile=${CELERYD_PID_FILE} \
--logfile=${CELERYD_LOG_FILE} --loglevel=${CELERYD_LOG_LEVEL} ${CELERYD_OPTS}'
@@ -6,8 +6,9 @@ CELERYD_OPTS="--time-limit=3000 --concurrency=1 --max-tasks-per-child=1"
CELERYD_LOG_FILE="/var/ippidb-{{ http_port }}-celery/celery%n%I.log"
CELERYD_PID_FILE="/var/ippidb-{{ http_port }}-celery/celery%n.pid"
CELERYD_LOG_LEVEL="DEBUG"
CELERYD_USER="celery"
CELERYD_USER="celery-{{ http_port }}"
CELERYD_GROUP="ippidb"
CELERYD_QUEUE="celery-{{ http_port }}"
CELERY_CREATE_DIRS=1
SYSTEMD_LOG_LEVEL=debug
DJANGO_SETTINGS_MODULE=ippisite.{{ ansible_hostname }}_settings
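The per-port queue name is what ties this worker configuration to the Django application: the systemd unit starts the worker with -Q ${CELERYD_QUEUE}, and the playbook below writes the matching CELERY_TASK_DEFAULT_QUEUE into the generated settings module. A minimal sketch of the mechanism (illustrative only; the module name and port are assumptions, not taken from this diff):

from celery import Celery

app = Celery("ippisite")
# mirrors the Ansible-managed settings block, for a hypothetical port 8000
app.conf.task_default_queue = "celery-8000"

@app.task
def ping():
    return "pong"

# ping.delay() now routes to the "celery-8000" queue, which only the worker
# started with "-Q celery-8000" consumes, so several iPPI-DB instances can
# share one host without stealing each other's tasks.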
@@ -10,7 +10,7 @@
selinux:
state: disabled
- name: Create celery user
-user: name=celery groups={{ deploy_user_name }} append=yes state=present createhome=yes
+user: name=celery-{{ http_port }} groups={{ deploy_user_name }} append=yes state=present createhome=yes
become: true
register: newuser
# Install basic non-virtualenv requirements
@@ -130,11 +130,11 @@
- name: stop "generic" httpd service if relevant
systemd: state=stopped name=httpd
- name: stop iPPIDB service if relevant
-systemd: state=stopped name=ippidb-web
+systemd: state=stopped name=ippidb{{ http_port }}-web
#ignore fail (i.e. when service does not exist yet)
ignore_errors: yes
- name: stop celery service
-systemd: state=stopped name=celery enabled=true
+systemd: state=stopped name=celery-{{ http_port }} enabled=true
ignore_errors: yes
#
# Set up celery service
@@ -160,7 +160,7 @@
- name: copy celery systemd service
template:
src: celery.service
-dest: /lib/systemd/system/celery.service
+dest: /lib/systemd/system/celery-{{ http_port }}.service
force: yes
owner: root
group: root
@@ -214,6 +214,19 @@
}
}
marker: "# {mark} ANSIBLE MANAGED DATABASE SETTINGS"
- name: Configure the CELERY QUEUE to submit tasks to from Django
blockinfile:
path: "{{ checkout_path }}/ippisite/ippisite/{{ ansible_hostname }}_settings.py"
block: |
CELERY_TASK_DEFAULT_QUEUE = "celery-{{ http_port }}"
marker: "# {mark} ANSIBLE MANAGED CELERY DEFAULT TASK QUEUE"
- name: Add Google Analytics settings to iPPI-DB settings
blockinfile:
path: "{{ checkout_path }}/ippisite/ippisite/{{ ansible_hostname }}_settings.py"
block: |
GA_CODE = "{{ gacode }}"
marker: "# {mark} ANSIBLE MANAGED GOOGLE ANALYTICS ID"
when: gacode is defined
- name: Add email/debug settings to iPPI-DB settings
blockinfile:
path: "{{ checkout_path }}/ippisite/ippisite/{{ ansible_hostname }}_settings.py"
@@ -282,7 +295,7 @@
BABEL_LIBDIR: "/usr/lib64/openbabel/"
- name: create mod_wsgi configuration
django_manage:
command: "runmodwsgi --setup-only --port={{ http_port }} --user ippidb --group wheel --server-root={{ service_conf_path }}"
command: "runmodwsgi --setup-only --port={{ http_port }} --user ippidb --group wheel --server-root={{ service_conf_path }} --url-alias /media {{ checkout_path }}/ippisite/media"
app_path: "{{ checkout_path }}/ippisite"
settings: "ippisite.{{ ansible_hostname }}_settings"
environment:
@@ -338,4 +351,4 @@
# Start celery service
#
- name: start celery service if relevant
-systemd: state=started name=celery enabled=true daemon_reload=true
+systemd: state=started name=celery-{{ http_port }} enabled=true daemon_reload=true
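The gacode extra-var passed by the deploy jobs ends up as GA_CODE in the generated settings module (see the Ansible task above). How the setting is consumed is outside this diff; one hypothetical way to expose it to templates would be a context processor (names below are assumptions):

from django.conf import settings

def google_analytics(request):
    # registered as a template context processor, this makes {{ ga_code }}
    # available on every page
    return {"ga_code": getattr(settings, "GA_CODE", "")}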
@@ -18,6 +18,10 @@
import os
import sys
import matplotlib
# avoid using tkinter with matplotlib
matplotlib.use('agg')
sys.path.insert(0, os.path.abspath('../..'))
import django
os.environ['DJANGO_SETTINGS_MODULE'] = 'ippisite.settings'
@@ -208,5 +212,7 @@ napoleon_use_rtype = False # More legible
# The suffix of source filenames.
autosummary_generate = True
exclude_patterns = ['_build']
#do not try to import tkinter for sphinx
autodoc_mock_imports = ['_tkinter']
\ No newline at end of file
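The conf.py changes above keep the Sphinx build headless: matplotlib is switched to the non-interactive agg backend before anything imports pyplot, and autodoc_mock_imports stops autodoc from importing _tkinter. The ordering is the important part; a small sketch:

import matplotlib
matplotlib.use("agg")  # must run before the first pyplot import
import matplotlib.pyplot as plt

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
fig.savefig("demo.png")  # renders to a file, no display server required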
@@ -27,6 +27,7 @@ from .models import (
Ppi,
ProteinDomainComplex,
Contribution,
Job,
)
from .tasks import launch_validate_contributions
@@ -60,6 +61,62 @@ class ViewOnSiteModelAdmin(admin.ModelAdmin):
)
@admin.register(Job)
class JobModelAdmin(admin.ModelAdmin):
date_hierarchy = "task_result__date_done"
list_display = (
"task_result_task_name",
"task_result_task_id",
"task_result_status",
"task_result_date_created",
"task_result_date_done",
)
list_filter = (
"task_result__status",
"task_result__date_done",
"task_result__task_name",
)
readonly_fields = (
"task_result_task_name",
"task_result_task_id",
"task_result_status",
"task_result_date_created",
"task_result_date_done",
)
search_fields = (
"task_result__task_name",
"task_result__task_id",
"task_result__status",
)
fields = (
("task_result_task_name", "task_result_task_id"),
"task_result_status",
("task_result_date_created", "task_result_date_done"),
("std_out", "std_err"),
)
def task_result_task_id(self, x):
return x.task_result.task_id
def task_result_task_name(self, x):
return x.task_result.task_name
def task_result_date_done(self, x):
return x.task_result.date_done
def task_result_status(self, x):
return x.task_result.status
def task_result_date_created(self, x):
return x.task_result.date_created
task_result_task_id.short_description = "task_id"
task_result_task_name.short_description = "task_name"
task_result_date_done.short_description = "date_done"
task_result_status.short_description = "status"
task_result_date_created.short_description = "date_created"
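# For context, the model shape this admin implies (a sketch inferred from the
# fields used above, not the project's actual models.py): task_result points
# at django-celery-results' TaskResult, which provides task_id, task_name,
# status, date_created and date_done, while std_out/std_err capture output.
#
#   from django_celery_results.models import TaskResult
#
#   class Job(models.Model):
#       task_result = models.ForeignKey(TaskResult, on_delete=models.CASCADE)
#       std_out = models.TextField(blank=True)
#       std_err = models.TextField(blank=True)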
@admin.register(Bibliography)
class BibliographyModelAdmin(ViewOnSiteModelAdmin):
list_display = ("authors_list", "title", "journal_name", "biblio_year", "id_source")
@@ -134,7 +191,7 @@ class ContributionModelAdmin(ViewOnSiteModelAdmin):
def validate_contributions(self, request, queryset):
ids = [id for id in queryset.values_list("id", flat=True)]
-launch_validate_contributions.delay(ids)
+launch_validate_contributions(ids)
self.message_user(
request,
f"validation started for contributions(s) "
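Note the one-line change above: launch_validate_contributions(ids) runs the validation synchronously inside the admin request, whereas the previous launch_validate_contributions.delay(ids) enqueued it for a Celery worker.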
@@ -465,9 +465,9 @@ class PpiModelForm(ModelForm):
label=_("Total number of pockets in the complex"), required=True
)
family_name = CharFieldDataList(
-# data_class=models.PpiFamily,
+data_class=models.PpiFamily,
data_list=[],
-# data_attr='name',
+data_attr="name",
label="PPI Family",
max_length=30,
required=True,
@@ -839,7 +839,7 @@ class BaseInlineNestedFormSet(forms.BaseInlineFormSet):
class CompoundActivityResultForm(ModelForm):
compound_name = forms.ChoiceField(choices=(), required=True)
activity_mol = forms.DecimalField(
label="Activity", required=True, max_digits=12, decimal_places=10, min_value=0,
label="Activity", required=True, max_digits=15, decimal_places=10, min_value=0,
)
activity_unit = forms.CharField(
label="Activity unit",
@@ -937,10 +937,20 @@ class CompoundActivityResultForm(ModelForm):
class CompoundActivityResultBaseInlineNestedFormSet(BaseInlineNestedFormSet):
__compound_names = []
# pIC50, pEC50, etc. activity types are labelled below as IC50, EC50,
# etc. because users enter raw IC50/EC50/Kd/Ki values in these forms;
# the values are then converted to and stored as the p-form.
__activity_types = [
("pIC50", "IC50 (half maximal inhibitory concentration)"),
("pEC50", "EC50 (half maximal effective concentration)"),
("pKd", "Kd (dissociation constant)"),
("pKi", "Ki (inhibition constant)"),
]
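# The conversion these labels imply, as a sketch (assuming activities are
# entered as molar concentrations): the p-form is the negative base-10
# logarithm of the concentration, e.g.
#
#   import math
#   -math.log10(1e-9)  # an IC50 of 1 nM corresponds to a pIC50 of 9.0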
def add_fields(self, form, index):
super().add_fields(form, index)
form.fields["compound_name"].choices = self.__compound_names
form.fields["activity_type"].choices = self.__activity_types
def set_modulation_type(self, modulation_type):
for form in self.forms:
@@ -1074,7 +1084,7 @@ class TestActivityDescriptionForm(forms.ModelForm):
def clean(self):
cleaned_data = super().clean()
if "test_type" in cleaned_data and cleaned_data["test_type"] == "CELL":
if "test_type" in cleaned_data and cleaned_data["test_type"] != "CELL":
cleaned_data["cell_line_name"] = ""
return cleaned_data
@@ -1088,7 +1098,10 @@
"""
# right
if hasattr(self, "cleaned_data"):
if "cell_line_name" in self.cleaned_data:
if (
"cell_line_name" in self.cleaned_data
and self.cleaned_data["cell_line_name"] != ""
):
cell_line, created = models.CellLine.objects.get_or_create(
name=self.cleaned_data["cell_line_name"]
)
import argparse
from datetime import datetime
from itertools import islice
import json
import re
import time
import tempfile
from bioblend.galaxy import GalaxyInstance
from django.conf import settings
from django.core.management import BaseCommand
from django.forms.models import model_to_dict
import pandas as pd
import requests
from ippidb.models import Compound
from ippidb.utils import smi2sdf
# disable insecure HTTP request warnings (used by bioblend)
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
BASE_URL = settings.GALAXY_BASE_URL
KEY = settings.GALAXY_APIKEY
WORKFLOW_ID = settings.GALAXY_COMPOUNDPROPERTIES_WORKFLOWID
class GalaxyCompoundPropertiesRunner(object):
def __init__(self, galaxy_instance):
self.galaxy_instance = galaxy_instance
def compute_properties_for_sdf_file(self, sdf_file_path):
# create a history to store the workflow results
now = datetime.now()
date_time = now.strftime("%Y/%m/%d-%H:%M:%S")
history_name = "compoundpropertiesjobrun_%s" % date_time
history = self.galaxy_instance.histories.create_history(name=history_name)
history_id = history["id"]
if history["state"] not in ["new", "ok"]:
raise Exception(
f'Error creating history "{history_name}" (id {history_id})'
)
# launch data upload job
upload_response = self.galaxy_instance.tools.upload_file(
path=sdf_file_path, file_type="sdf", history_id=history_id
)
upload_data_id = upload_response["outputs"][0]["id"]
upload_job = upload_response["jobs"][0]
upload_job_id = upload_job["id"]
# monitor data upload until completed or on error
while upload_job["state"] not in ["ok"]:
time.sleep(2)
upload_job = self.galaxy_instance.jobs.show_job(upload_job_id)
if upload_job["state"] in ["error", "deleted", "discarded"]:
data = self.galaxy_instance.datasets.show_dataset(upload_data_id)
raise Exception(
f"Error during Galaxy data upload job - name : "
f"{data['name']}, id : {upload_data_id}, "
f"error : {data['misc_info']}"
)
# check uploaded dataset status
data = self.galaxy_instance.datasets.show_dataset(upload_data_id)
if data["state"] not in ["ok"]:
raise Exception(
f"Error during Galaxy data upload result - name : "
f"{data['name']}, id : {upload_data_id}, "
f"error : {data['misc_info']}"
)
# submit compound properties computation job
dataset_map = {"0": {"src": "hda", "id": upload_data_id}}
workflow_job = self.galaxy_instance.workflows.invoke_workflow(
WORKFLOW_ID, inputs=dataset_map, history_id=history_id
)
workflow_job_id = workflow_job["id"]
while workflow_job["state"] not in ["ok", "scheduled"]:
time.sleep(2)
# use the configured workflow id rather than a hardcoded one
workflow_job = self.galaxy_instance.workflows.show_invocation(
WORKFLOW_ID, workflow_job_id
)
if workflow_job["state"] in ["error", "deleted", "discarded"]:
raise Exception(
f"Error during Galaxy workflow job - name : "
f"id : {workflow_job_id}, "
)
datasets = self.galaxy_instance.histories.show_history(
history_id, contents=True
)
actual_result_dataset = None
for dataset in datasets:
if dataset["extension"] == "json":
actual_result_dataset = dataset
if actual_result_dataset is None:
raise Exception(
f"Result for galaxy workflow invocation {workflow_job_id} not found in"
f" history {history_id}"
)
dataset = self.galaxy_instance.datasets.show_dataset(
actual_result_dataset["id"]
)
while dataset["state"] not in ["ok"]:
time.sleep(2)
dataset = self.galaxy_instance.datasets.show_dataset(
actual_result_dataset["id"]
)
download_url = dataset["download_url"]
contents_resp = requests.get(BASE_URL + download_url, verify=False)
contents = contents_resp.json()
return contents
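# Typical use of the runner above, as a sketch (URL and key are placeholders):
#
#   gi = GalaxyInstance(url="https://galaxy.example.org", key="...", verify=False)
#   runner = GalaxyCompoundPropertiesRunner(gi)
#   properties = runner.compute_properties_for_sdf_file("compounds.sdf")
#
# i.e. upload the SDF, invoke the configured workflow, poll until finished,
# and return the workflow's JSON results.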
def idrange_type(s, pat=re.compile(r"^(\d+)-(\d+)$")):
m = pat.match(s)
if not m:
raise argparse.ArgumentTypeError(
"please specify ID range as [start number]-[endnumber]"
)
return (int(m.groups()[0]), int(m.groups()[1]))
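# e.g. idrange_type("10-20") -> (10, 20), while idrange_type("10-") raises
# argparse.ArgumentTypeError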
def dec(decimal_places):
def func(number):
return round(float(number), decimal_places)
return func
def chunks(data, size=10):
it = iter(data)
for i in range(0, len(data), size):
yield {k: data[k] for k in islice(it, size)}
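# e.g. dec(2)(3.14159) -> 3.14, and
# list(chunks({"a": 1, "b": 2, "c": 3}, size=2)) -> [{"a": 1, "b": 2}, {"c": 3}]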
class Command(BaseCommand):
help = "Compute compound physicochemical properties"
def add_arguments(self, parser):
parser.add_argument(
"mode", choices=["update", "compare", "print"], default="update"
)
selection = parser.add_mutually_exclusive_group(required=True)
selection.add_argument(
"--all", action="store_true", help="Process all compounds in the database"
)
selection.add_argument(
"--ids",
nargs="+",
type=int,
help="Process the compounds for the specified IDs",
)
selection.add_argument(
"--idrange",
type=idrange_type,
help="Process the compounds for the specified ID range",
)
parser.add_argument(
"--json",
type=argparse.FileType("r"),
help="Process precomputed results stored in a JSON file",
)
parser.add_argument(
"--xls", type=argparse.FileType("w"), help="Store results in Excel file"
)
def handle(self, *args, **options):
# select the compounds that need to be processed
smiles_dict = {}
compounds = []
pc_properties = {}
already_done_ids = []
if options["json"] is not None:
pc_properties_dict = json.load(open(options["json"].name, "r"))
ids = [
int(key)
for key, item in pc_properties_dict.items()
if "IUPAC" not in item
]
already_done_ids = [
int(key) for key, item in pc_properties_dict.items() if "IUPAC" in item
]
if options["all"] is True:
ids = Compound.objects.all().values("id")
elif options["ids"]:
ids = Compound.objects.filter(id__in=options["ids"]).values("id")
elif options["idrange"]:
ids = Compound.objects.filter(
id__gte=options["idrange"][0], id__lte=options["idrange"][1]
).values("id")
else:
ids = Compound.objects.filter(iupac_name__isnull=True).values("id")
ids = [row["id"] for row in ids]
ids = list(set(ids) - set(already_done_ids))
compounds = Compound.objects.filter(id__in=ids)
for c in compounds:
smiles_dict[c.id] = c.canonical_smile
# create or reuse existing JSON file to save new results
if options["json"]:
json_file = options["json"].name
else:
json_fh = tempfile.NamedTemporaryFile(mode="w", delete=False)
json.dump({c.id: {} for c in compounds}, json_fh)
json_file = json_fh.name
json_fh.close()
self.stderr.write(self.style.SUCCESS(f"Compound properties file: {json_file}"))
if len(compounds) > 0:
self.stderr.write(f"Now processing {len(compounds)} compounds")
# set up Galaxy computation environment
gi = GalaxyInstance(url=BASE_URL, key=KEY, verify=False)
gi.nocache = True
runner = GalaxyCompoundPropertiesRunner(gi)
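# submit compounds to Galaxy in small batches so each upload and workflow
# invocation stays short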
chunk_size = 3
for smiles_dict_chunk in chunks(smiles_dict, chunk_size):
# create SDF file for the selection
sdf_string = smi2sdf(smiles_dict_chunk)
fh = tempfile.NamedTemporaryFile(mode="w", delete=False)
fh.write(sdf_string)
fh.close()
self.stderr.write(
self.style.SUCCESS(
f"Galaxy input SDF file for compounds {smiles_dict_chunk.keys()}: {fh.name}"
)
)
# run computations on Galaxy
pc_properties = runner.compute_properties_for_sdf_file(fh.name)
new_pc_properties_dict = {
compound["Name"]: compound for compound in pc_properties
}
pc_properties_dict = json.load(open(json_file, "r"))
pc_properties_dict.update(new_pc_properties_dict)
fh = open(json_file, "w")
json.dump(pc_properties_dict, fh, indent=4)
fh.close()
self.stderr.write(
self.style.SUCCESS(
f"Properties added for compounds {smiles_dict_chunk.keys()} in JSON file: {json_file}"
)
)
# report and update database
property_mapping = {
"CanonicalSmile": ("canonical_smile", str),
"IUPAC": ("iupac_name", str),
"TPSA": ("tpsa", dec(2)),
"NbMultBonds": ("nb_multiple_bonds", int),
"BalabanIndex": ("balaban_index", dec(2)),
"NbDoubleBonds": ("nb_double_bonds", int),
"RDF070m": ("rdf070m", dec(2)),
"SumAtomPolar": ("sum_atom_polar", dec(2)),