Commit e8e74d8f authored by Ivan Pavlovich

Trying to get the number of published articles on PubMed (problem: URI too long, HTTP 414, not resolved), started looking at Ollama, started looking at Hugging Face's text-generation models, and started looking at tokenizers for price estimation.
parent 915bd066
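Side note on the URI-too-long (414) problem mentioned above: E-utilities also accepts HTTP POST for esearch, so long term lists can go in the request body instead of the URL. A minimal sketch, not part of this commit; it assumes variables/pubmed.py is importable (as the repo's scripts arrange via sys.path) and uses an illustrative query:

from requests import post

from variables.pubmed import PUBMED_API_KEY  # defined in variables/pubmed.py

ESEARCH_URL = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi"

def esearch_count(term, mindate, maxdate):
    # Send the parameters in the POST body so the URL stays short.
    data = {
        "db": "pubmed",
        "api_key": PUBMED_API_KEY,
        "term": term,
        "retmode": "json",
        "mindate": mindate,
        "maxdate": maxdate,
    }
    response = post(ESEARCH_URL, data=data)
    return int(response.json()["esearchresult"]["count"])

# Illustrative short query; the real scripts build much longer OR-joined term lists.
print(esearch_count('"Noncommunicable Diseases"[Mesh]', "2024/01/01", "2024/12/31"))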
File added
@@ -61,24 +61,27 @@ for term in TERMS:
ncd_mesh = term + "[Mesh]"
print("TERM: ", ncd_mesh)
keywords_term = ""
for keyword in KEYWORDS:
data[term][keyword] = {}
keyword_term = keyword.replace(" ", "+").replace("&", "%26").replace("/", "%2F")
print("KEYWORD: ", keyword_term)
search_term = ncd_mesh + '+AND+"' + keyword_term + '"'
keywords_term += keyword_term + "+OR+"
search_term = ncd_mesh + "+AND+(+" + keywords_term + "+)"
print("SEARCH: ", search_term)
print("SEARCH: ", search_term)
for interval in INTERVALS:
print("INTERVAL: ", interval)
counts = get_count_for_year(2024, search_term, interval)
print(counts)
data[term][keyword][interval] = counts
for interval in INTERVALS:
print("INTERVAL: ", interval)
counts = get_count_for_year(2024, search_term, interval)
print(counts)
data[term][interval] = counts
with open(f"{RESULTS_DIR}/{term}.txt", "w+") as file:
print(data[term], file=file)
# with open(f"{RESULTS_DIR}/{term}.txt", "w+") as file:
# print(data[term], file=file)
print("DATA: ", data)
\ No newline at end of file
import sys
import os
from requests import get
from datetime import datetime, timedelta
import time
import urllib.parse
TERMS = [
'"Noncommunicable+Diseases"', # NCDs (All)
'"Diabetes+Mellitus"', # Diabetes (type 1 or 2)
'"Neoplasms"', # Cancer
'"Respiratory+Tract+Diseases"', # Chronic respiratory disease
'"Cardiovascular+Diseases"', # Cardiovascular diseases
'"Mental+Health"', # Mental Health
'"Diabetes+Mellitus%2C+Type+1"', # Diabetes type 1
'"Diabetes+Mellitus%2C+Type+2"' # Diabetes type 2
]
# Add the parent directory to the search path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "../../")))
from variables.pubmed import *
from dataSources.PubMed.util import *
RESULTS_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "./results"))
INTERVALS = [
"day",
"week",
# "day",
# "week",
"month"
]
@@ -32,13 +32,24 @@ def get_count_for_year(year, term, interval = "month"):
elif interval == "month":
next_date = (current_date.replace(day=28) + timedelta(days=4)).replace(day=1)
url = f'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term={term}&retmode=json&mindate={current_date.strftime("%Y/%m/%d")}&maxdate={next_date.strftime("%Y/%m/%d")}&usehistory=y'
response = get(url)
search_res = response.json()
counts.append(int(search_res["esearchresult"]["count"]))
while (True):
try:
url = f'https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&api_key={PUBMED_API_KEY}&term={term}&retmode=json&mindate={current_date.strftime("%Y/%m/%d")}&maxdate={next_date.strftime("%Y/%m/%d")}'
print("Ok 1")
response = get(url)
print(response)
print("Ok 2")
search_res = response.json()
print("Ok 3")
counts.append(int(search_res["esearchresult"]["count"]))
current_date = next_date
time.sleep(1) # if more than 3 requests per second, the IP address gets banned (normally)
print("Ok 4")
current_date = next_date
break
except Exception as e:
print(e)
time.sleep(0.2)
max_count = max(counts)
min_count = min(counts)
@@ -46,17 +57,235 @@ def get_count_for_year(year, term, interval = "month"):
return {"max": max_count, "min": min_count, "avg": avg_count}
data = {}
interval = "month"
tot = 0
for ncd in NCDS:
search_term = url_encode(ncd)
print(f"SEARCH_TERM: {search_term}")
counts = get_count_for_year(2024, search_term, interval)
tot += counts["avg"]
search_term = url_encode(" OR ".join(NCDS))
print(f"SEARCH_TERM: {search_term}")
counts = get_count_for_year(2024, search_term, interval)
print(tot)
print(counts["avg"])
exit(0)
result = {}
ncds_mesh = get_mesh_term(NCDS_MESH_TERM)
ncds_mesh_noexp = get_mesh_noexp_term(NCDS_MESH_TERM)
print(url_encode(" OR ".join(ncds_mesh)))
print(url_encode(" OR ".join(ncds_mesh_noexp)))
keywords_mesh = get_mesh_term(KEYWORDS_MESH_TERM)
keywords_mesh_noexp = get_mesh_noexp_term(KEYWORDS_MESH_TERM)
print(url_encode(" OR ".join(keywords_mesh)))
print(url_encode(" OR ".join(keywords_mesh_noexp)))
keywords_mesh_site_proposition = get_mesh_term(KEYWORDS_MESH_SITE_PROPOSITION)
keywords_mesh_site_proposition_noexp = get_mesh_noexp_term(KEYWORDS_MESH_SITE_PROPOSITION)
print(url_encode(" OR ".join(keywords_mesh_site_proposition)))
print(url_encode(" OR ".join(keywords_mesh_site_proposition_noexp)))
keywords_mesh_proposition = get_mesh_term(KEYWORDS_MESH_PROPOSITION)
keywords_mesh_proposition_noexp = get_mesh_noexp_term(KEYWORDS_MESH_PROPOSITION)
print(url_encode(" OR ".join(keywords_mesh_proposition)))
print(url_encode(" OR ".join(keywords_mesh_proposition_noexp)))
keywords_hubheading = [ f'"{subheading}"[Subheading]' for subheading in KEYWORDS_MESH_SUBHEADING]
keywords_hubheading_noexp = [ f'"{subheading}"[Subheading:noexp]' for subheading in KEYWORDS_MESH_SUBHEADING]
print(url_encode(" OR ".join(keywords_hubheading)))
print(url_encode(" OR ".join(keywords_hubheading_noexp)))
for interval in INTERVALS:
result[interval] = {}
for ncd in NCDS:
result[interval][ncd] = {}
search_term = url_encode(ncd)
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd} AND ( " + " OR ".join(KEYWORDS) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd} AND ( " + " OR ".join(keywords_mesh) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd} AND ( " + " OR ".join(keywords_mesh_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd} AND ( " + " OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd} AND ( " + " OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(" OR ".join(NCDS))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd} AND ( {" OR ".join(KEYWORDS)} ) )']
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd} AND ( {" OR ".join(keywords_mesh)} ) )']
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd} AND ( {" OR ".join(keywords_mesh_noexp)} ) )']
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd} AND ( {" OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition)} ) )']
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd} AND ( {" OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp)} ) )']
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS"]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
# ------------------------------------
for ncd_mesh in ncds_mesh:
result[interval][ncd_mesh] = {}
search_term = url_encode(ncd_mesh)
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh} AND ( " + " OR ".join(KEYWORDS) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh} AND ( " + " OR ".join(keywords_mesh) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh} AND ( " + " OR ".join(keywords_mesh_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh} AND ( " + " OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh} AND ( " + " OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(" OR ".join(ncds_mesh))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh} AND ( {" OR ".join(KEYWORDS)} ) )' for ncd_mesh in ncds_mesh]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh} AND ( {" OR ".join(keywords_mesh)} ) )' for ncd_mesh in ncds_mesh]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh} AND ( {" OR ".join(keywords_mesh_noexp)} ) )' for ncd_mesh in ncds_mesh]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh} AND ( {" OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition)} ) )' for ncd_mesh in ncds_mesh]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh} AND ( {" OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp)} ) )' for ncd_mesh in ncds_mesh]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH"]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
# ----------------------------------------
for ncd_mesh_noexp in ncds_mesh_noexp:
result[interval][ncd_mesh_noexp] = {}
search_term = url_encode(ncd_mesh_noexp)
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh_noexp} AND ( " + " OR ".join(KEYWORDS) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh_noexp} AND ( " + " OR ".join(keywords_mesh) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh_noexp} AND ( " + " OR ".join(keywords_mesh_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh_noexp} AND ( " + " OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(f"{ncd_mesh_noexp} AND ( " + " OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp) + " )")
print(f"SEARCH_TERM: {search_term}")
result[interval][ncd_mesh_noexp]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
search_term = url_encode(" OR ".join(ncds_mesh_noexp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["WITHOUT_KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh_noexp} AND ( {" OR ".join(KEYWORDS)} ) )' for ncd_mesh_noexp in ncds_mesh_noexp]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["KEYWORDS"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh_noexp} AND ( {" OR ".join(keywords_mesh)} ) )' for ncd_mesh_noexp in ncds_mesh_noexp]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["KEYWORDS_MESH"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh_noexp} AND ( {" OR ".join(keywords_mesh_noexp)} ) )' for ncd_mesh_noexp in ncds_mesh_noexp]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["KEYWORDS_MESH_NOEXP"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh_noexp} AND ( {" OR ".join(keywords_mesh + keywords_hubheading + keywords_mesh_site_proposition + keywords_mesh_proposition)} ) )' for ncd_mesh_noexp in ncds_mesh_noexp]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["KEYWORDS_MESH_ALL"] = get_count_for_year(2024, search_term, interval)
tmp = [f'( {ncd_mesh_noexp} AND ( {" OR ".join(keywords_mesh_noexp + keywords_hubheading_noexp + keywords_mesh_site_proposition_noexp + keywords_mesh_proposition_noexp)} ) )' for ncd_mesh_noexp in ncds_mesh_noexp]
search_term = url_encode(" OR ".join(tmp))
print(f"SEARCH_TERM: {search_term}")
result[interval]["ALL NCDS MESH NOEXP"]["KEYWORDS_MESH_NOEXP_ALL"] = get_count_for_year(2024, search_term, interval)
for term in TERMS:
data[term] = {}
mesh = term + "[Mesh]"
print("TERM: ", mesh)
print(result)
for interval in INTERVALS:
print("INTERVAL: ", interval)
counts = get_count_for_year(2024, mesh, interval)
print(counts)
data[term][interval] = counts
with open(f"{RESULTS_DIR}/results.json", "w+") as json_file:
json.dump(result, json_file, indent=4)
print(data)
\ No newline at end of file
print("END")
\ No newline at end of file
{
'Noncommunicable+Diseases': {
'month': {
'max': 47, 'min': 17, 'avg': 34.0
}
},
'Diabetes+Mellitus': {
'month': {
'max': 1622, 'min': 1015, 'avg': 1324.0833333333333
}
},
'Neoplasms': {
'month': {
'max': 8468, 'min': 5817, 'avg': 7558.916666666667
}
},
'Respiratory+Tract+Diseases': {
'month': {
'max': 4927, 'min': 3119, 'avg': 4292.583333333333
}
},
'Cardiovascular+Diseases': {
'month': {
'max': 4565, 'min': 2981, 'avg': 4083.0833333333335
}
},
'Mental+Health': {
'month': {
'max': 421, 'min': 269, 'avg': 358.0833333333333
}
},
'Diabetes+Mellitus%2C+Type+1': {
'month': {
'max': 221, 'min': 121, 'avg': 173.33333333333334
}
},
'Diabetes+Mellitus%2C+Type+2': {
'month': {
'max': 750, 'min': 447, 'avg': 608.4166666666666
}
}
}
\ No newline at end of file
import urllib.parse
def url_encode(text):
return urllib.parse.quote_plus(text, safe='[]():"')
def get_mesh_term(terms):
res = []
for item in terms.values():
if isinstance(item, list):
tmp = [f'"{i}"[Mesh]' for i in item]
res.append(" AND ".join(tmp))
else:
res.append(f'"{item}"[Mesh]')
return res
def get_mesh_noexp_term(terms):
res = []
for item in terms.values():
if isinstance(item, list):
tmp = [f'"{i}"[Mesh:noexp]' for i in item]
res.append(f'( {" AND ".join(tmp)} )')
else:
res.append(f'"{item}"[Mesh:noexp]')
return res
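For orientation, a minimal usage sketch of these helpers; the sample dict below is hypothetical, the real inputs come from variables/pubmed.py:

sample = {
    "Health policy": "Health Policy",
    "Clinical guidelines": ["ambulatory care facilities", "Guidelines as Topic"],
}
print(get_mesh_term(sample))
# ['"Health Policy"[Mesh]', '"ambulatory care facilities"[Mesh] AND "Guidelines as Topic"[Mesh]']
print(get_mesh_noexp_term(sample))
# ['"Health Policy"[Mesh:noexp]', '( "ambulatory care facilities"[Mesh:noexp] AND "Guidelines as Topic"[Mesh:noexp] )']
print(url_encode(" OR ".join(get_mesh_term(sample))))
# spaces become "+", while [], (), : and " are kept literal for PubMed query syntax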
from transformers import pipeline
import torch
MODELS = [
"deepseek-ai/DeepSeek-V3" #https://huggingface.co/deepseek-ai/DeepSeek-V3
]
def create_generator(model = MODELS[0]):
print(f" CUDA available: {torch.cuda.is_available()}")
print(f"CUDA version: {torch.version.cuda}")
print(f"GPUs number: {torch.cuda.device_count()}")
device = 0 if torch.cuda.is_available() else -1
return pipeline("text-generation", model=model, device=device)
def generate(generator, sequence, debug = False):
results = generator(sequence)
if debug:
print(f"Sequence: {sequence}")
print(f"Labels: {results['labels']}")
print(f"Scores: {results['scores']}")
return results
generator = create_generator()
res = generate(generator, "Hi, how are you?")
print(res)
File added
from ollama import chat
from ollama import ChatResponse
response: ChatResponse = chat(model='llama3.2', messages=[
{
'role': 'user',
'content': 'Why is the sky blue?',
},
])
print(response['message']['content'])
# or access fields directly from the response object
print(response.message.content)
\ No newline at end of file
- https://github.com/ollama/ollama-python
- https://github.com/ollama/ollama
Check Bedrock:
- https://aws.amazon.com/bedrock/
Check Tokenizers:
- https://huggingface.co/docs/tokenizers/quicktour
Fine-tuning zero-shot classifier:
- https://stackoverflow.com/questions/76213873/how-to-finetune-a-zero-shot-model-for-text-classification
TODO:
- Store data from PubMed locally for NCDs
- Calculate data published
- Better testing script
- Get average token count per PubMed article
- Make price estimations for LLMs (see the token-count sketch below)
- Look at and test new LLMs (local and cloud)
\ No newline at end of file
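A minimal token-count sketch for the tokenizer and price-estimation items above; the tokenizer checkpoint and the per-token price are placeholders, not project decisions:

from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("gpt2")  # placeholder checkpoint

# Hypothetical abstract text; real input would come from the stored PubMed articles.
abstract = (
    "Noncommunicable diseases account for a large share of global mortality; "
    "this sentence only stands in for a real abstract."
)

n_tokens = len(tokenizer.encode(abstract))
price_per_1k_tokens = 0.0005  # hypothetical USD rate, to be replaced with real pricing
print(n_tokens, n_tokens / 1000 * price_per_1k_tokens)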
File added
PUBMED_API_KEY = "63d31fa6fc74a5b632d3560046df75748208"
NCDS = [
"Non-Communicable Diseases",
"Diabetes",
"Cancer",
"Chronic respiratory disease",
"Cardiovascular diseases",
"Mental Health",
"Diabetes type 1",
"Diabetes type 2"
]
NCDS_MESH_TERM = {
'Non-Communicable Diseases': "Noncommunicable Diseases",
'Diabetes': "Diabetes Mellitus",
'Cancer': "Neoplasms",
'Chronic respiratory disease': "Respiratory Tract Diseases",
'Cardiovascular diseases': "Cardiovascular Diseases",
'Mental Health': "Mental Health",
'Diabetes type 1': "Diabetes Mellitus, Type 1",
'Diabetes type 2': "Diabetes Mellitus, Type 2"
}
KEYWORDS = [
"Availability",
"Affordability",
"Essential medecins",
"Care therapy",
"Care health",
"Health Expenditures",
"Health care costs",
"Market",
"Special populations",
"Child Health",
"Womens Health",
"Age",
"Minority",
"Primary Care",
"Specialty Care",
"Patient acceptance",
"Patient centered care",
"Prevention and control",
"Mass screening",
"Palliative care",
"Quality",
"Telemedicine",
"Digital health",
"Supplies",
"Human Resources",
"Enablers/barriers",
"Gender equity",
"Racial",
"Equity",
"Clinical",
"Health promotion",
"Health education",
"Research & Innovation",
"Therapeutic Development",
"Technological Development",
"Self-management",
"Self-monitoring",
"Dosing",
"Injections",
"Secondary Care",
"Integrated Care",
"Treatment management",
"Immunization",
"Vaccination",
"Adherence",
"Control",
"Rehabilitation services",
"Clinical guidelines",
"Health policy",
"Healthcare policy",
"National health policy",
"Regional health policy",
"Health legislation",
"Policy evaluation",
"Policy analysis",
"Policy formulation",
"Regulation",
"Governance",
"Global initiatives and organizations",
"Universal Health Care",
"Expansion",
"Health insurance",
"Coverage",
"Funding and investment",
"Health planning",
"Health reform",
"Policy monitoring",
"Public health campaign",
"Policy lobbying",
"Patient advocacy",
"Justice",
"Awareness campaign",
"Education",
"Corporate accountability",
"Social determinants of health",
"Empowerment",
"Community",
"Peer support",
"Civil society",
"Patient education",
"Parent education",
"Educational materials",
"Community heatlh education",
"Awareness",
"Community engagement",
"Health literacy",
"Medical education",
"Training program",
"Technology education",
"Medical devices",
"Information Dissemination",
"Behavioral change",
"Nutrition education",
"Risk communication",
"Sector integration"
]
KEYWORDS_MESH_TERM = {
'Health Expenditures': "Health Expenditures",
'Health care costs': "Health Care Costs",
'Patient centered care': "Patient-Centered Care",
'Mass screening': "Mass Screening",
'Palliative care': "Palliative Care",
'Telemedicine': "Telemedicine",
'Digital health': "Digital Health",
'Gender equity': "Gender Equity",
'Health promotion': "Health Promotion",
'Health education': "Health Education",
'Self-management': "Self-Management",
'Injections': "Injections",
'Secondary Care': "Secondary Care",
'Immunization': "Immunization",
'Vaccination': "Vaccination",
'Health policy': "Health Policy",
'Universal Health Care': "Universal Health Care",
'Health insurance': "Insurance, Health",
'Health planning': "Health Planning",
'Patient advocacy': "Patient Advocacy",
'Education': "Education",
'Social determinants of health': "Social Determinants of Health",
'Empowerment': "Empowerment",
'Awareness': "Awareness",
'Health literacy': "Health Literacy",
'Medical education': "Education, Medical",
'Information Dissemination': "Information Dissemination"
}
KEYWORDS_MESH_SUBHEADING = {
'Prevention and control': "prevention and control"
}
KEYWORDS_MESH_SITE_PROPOSITION = {
'Affordability': "Costs and Cost Analysis",
'Special populations': "Health Disparate Minority and Vulnerable Populations",
'Technological Development': "Sustainable Development",
'Self-monitoring': "Blood Glucose Self-Monitoring",
'Treatment management': "Patient Care Management",
'Healthcare policy': "Health Policy",
'National health policy': "Health Policy",
'Policy analysis': "Policy Making",
'Awareness campaign': "Immunization Programs",
'Civil society': "Social Justice",
'Parent education': "Prenatal Education",
'Educational materials': "Educational Technology",
'Community heatlh education': "Health Education"
}
KEYWORDS_MESH_PROPOSITION = {
'Availability': "Biological Availability",
'Care therapy': "Palliative Care",
'Care health': "Primary Health Care",
'Market': "marketing",
'Age': "Age Groups",
'Minority': "Minority Groups",
'Primary Care': "Primary Health Care",
'Patient acceptance': "Patient Acceptance of Health Care",
'Quality': "Quality Assurance, Health Care",
'Supplies': "Equipment and Supplies",
'Human Resources': "Workforce",
'Racial': "Racial Groups",
'Equity': "Diversity, Equity, Inclusion",
'Clinical': "ambulatory care facilities",
'Research & Innovation': "Research",
'Therapeutic Development': ["Therapeutics", "Growth and Development"],
'Integrated Care': "Delivery of Health Care, Integrated",
'Adherence': "Assessment of Medication Adherence",
'Control': "Control Groups",
'Rehabilitation services': "Rehabilitation",
'Clinical guidelines': ["ambulatory care facilities", "Guidelines as Topic"],
'Regional health policy': ["geographic locations", "Health Policy"],
'Health legislation': "Legislation as Topic",
'Policy evaluation': "policy",
'Policy formulation': "Policy Making",
'Regulation': "Social Control, Formal",
'Governance': "Clinical Governance",
'Coverage': "Preexisting Condition Coverage",
'Funding and investment': ["economics", "investments"],
'Health reform': "Health Care Reform",
'Public health campaign': "public health",
'Policy lobbying': ["policy", "lobbying"],
'Justice': "Social Justice",
'Community': "Residence Characteristics",
'Patient education': "Patient Education as Topic",
'Training program': "Education",
'Technology education': ["technology", "Education"],
'Medical devices': "Equipment and Supplies",
'Behavioral change': "behavior",
'Nutrition education': ["nutritional sciences", "Education"],
'Risk communication': ["risk", "communication"]
}
\ No newline at end of file