mirror of
https://github.com/OpenJarbas/PySychonaut.git
synced 2024-11-22 11:19:24 +01:00
Initial commit
commit afe8209a64
50 example.py Normal file
@@ -0,0 +1,50 @@
from pyerowid import Erowid

trip_report = Erowid.random_experience()
print(trip_report.keys())

""" ['url', 'gender', 'age', 'experience', 'drug', 'year', 'date', 'exp_id', 'dosage'] """

trip_report = Erowid.get_experience(1)
for key in trip_report:
    print(key, ":", trip_report[key])

"""
url : https://erowid.org/experiences/exp.php?ID=1
gender : not specified
age : not given
experience : 15 minutes after this scary ordeal i begin to settle down, i layed down inside his warm house on the rug, touching and rubbing my hands like everywhere, everything was orgasmic feeling. I took a look at my eyes, i look like satan! it was so cool! Then after about and hour laying on the floor, other people come to his house who i dont even really know, but i just conversed with them with no feeling of stupidity, embarresment or consequences, very cool, i loved it, an hour later my eyes still were dialated but effects were over, it was fantastic
drug : ecstasy
year : 2000
date : may 30, 2000
exp_id : 1
dosage : [{'substance': u'mdma', 'form': u'(pill / tablet)', 'method': u'oral', 'ammount': u'0.5 tablets', 'time': u't+ 0:00'}, {'substance': u'mdma', 'form': u'(pill / tablet)', 'method': u'oral', 'ammount': u'0.5 tablets', 'time': u't+ 0:45'}]
"""

chemicals = Erowid.get_chemicals()
print(chemicals[0].keys())
names = [c["name"] for c in chemicals]
print(names)

"""
['url', 'other_names', 'name', 'effects']
[u'absinthe', u'acetylfentanyl', u'adrenochrome', u'aet', u'alcohol', u'alpha-pvp', u'amitriptyline', u'amphetamine', u'amt', u'ayahuasca', u'barbiturates', u'bk-mbdb', u'bromo-dragonfly', u'bufotenin', u'bz', u'bzp', u'caffeine', u'cannabinoids', u'capsaicin', u'carbogen', u'cathinone', u'chloroform', u'chocolate', u'cocaine / crack', u'desomorphine', u'det', u'dipt', u'dmt', u'dob', u'doc', u'doi', u'dom', u'dpt', u'dxm', u'ephedrine', u'ether', u'ethylcathinone', u'ethylene', u'ethylphenidate', u'ghb', u'ghv', u'harmala', u'heroin', u'hypocretin', u'iap', u'ibogaine', u'inhalants', u'ketamine', u'lsa', u'lsd', u'lsz', u'maois', u'mbdb', u'mcpp', u'mda', u'mde', u'mdai', u'mdma', u'mdpr', u'mdpv', u'mescaline', u'met', u'methadone', u'methamphetamines', u'methaqualone', u'methiopropamine', u'methoxetamine', u'methoxphenidine', u'methylone', u'mipt', u'mptp', u'nbome', u'nicotine', u'nitrous', u'opiates', u'opium', u'petroleum ether', u'piperazines', u'pcp', u'pma', u'pmma', u'psilocybin & psilocin', u'salvinorin b ethoxymethyl ether', u'scopolamine', u'"spice" product', u'ssris', u'tfmpp', u'thc', u'tma-2', u'toad venom', u'1,4-butanediol', u'2-aminoindan', u'2c-b', u'2c-b-fly', u'2c-c', u'2c-c-nbome', u'2c-d', u'2c-e', u'2c-i', u'2c-i-nbome', u'2c-p', u'2c-t-2', u'2c-t-4', u'2c-t-7', u'2c-t-21', u'3c-p', u'3-meo-pcp', u'4-acetoxy-det', u'4-acetoxy-dipt', u'4-acetoxy-dmt', u'4-acetoxy-mipt', u'4-fluoroamphetamine', u'4-fluoromethcathinone', u'4-hydroxy-dipt', u'4-hydroxy-met', u'4-hydroxy-mipt', u'4-hydroxy-mpt', u'4-meo-pcp', u'4-methylaminorex', u'4-methylmethcathinone', u'4-methylethcathinone', u'4-mta', u'5-it', u'5-meo-amt', u'5-meo-dalt', u'5-meo-dmt', u'5-meo-dipt', u'5-meo-mipt', u'6-apb', u'other chemicals']
"""

chem_data = Erowid.parse_page("https://erowid.org/chemicals/lsd/lsd.shtml")
for key in chem_data:
    print(key, ":", chem_data[key])

"""
info : {'basics': 'https://erowid.org/chemicals/lsd/lsd_basics.shtml', 'dose': 'https://erowid.org/chemicals/lsd/lsd_dose.shtml', 'health': 'https://erowid.org/chemicals/lsd/lsd_health.shtml', 'effects': 'https://erowid.org/chemicals/lsd/lsd_effects.shtml', 'images': 'https://erowid.org/chemicals/lsd/lsd_images.shtml', 'law': 'https://erowid.org/chemicals/lsd/lsd_law.shtml', 'chemistry': 'https://erowid.org/chemicals/lsd/lsd_chemistry.shtml'}
picture : https://erowid.org/chemicals/lsd/images/lsd_summary1.jpg
name : lsd-25
url : https://erowid.org/chemicals/lsd/
other_names : [u'acid', u'l', u'tabs', u'blotter', u'doses', u'trips']
effects : Psychedelic
chem_name : d-lysergic acid diethylamide
description : LSD is the best known and most researched psychedelic. It is the standard against which all other psychedelics are compared. It is active at extremely low doses and is most commonly available on blotter or in liquid form.
"""
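get_experience() returns None when a report page is missing or fails to parse (see pyerowid/__init__.py below), and random_experience() relies on that to keep retrying. A minimal guard for direct lookups could look like the sketch below; the ID 999999999 is a hypothetical value assumed not to exist, chosen only to show the None path.

from pyerowid import Erowid

report = Erowid.get_experience(999999999)  # hypothetical ID, assumed missing
if report is None:
    print("no report found for that ID")
else:
    print(report["drug"], report["year"], report["dosage"])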
162 pyerowid/__init__.py Normal file
@@ -0,0 +1,162 @@
import requests
from bs4 import BeautifulSoup
import random


__author__ = "jarbasAI"


class Erowid(object):
    """Scraper for erowid.org experience reports and substance index pages."""

    @staticmethod
    def extract_experience_text(text):
        # The report body sits between HTML comment markers on the experience page.
        try:
            begin_delimiter = '<!-- Start Body -->'
            begin = text.index(begin_delimiter) + len(begin_delimiter)
            end = text.index('<!-- End Body -->')
            return text[begin:end].strip().replace("<BR>", "\n").replace("<br>", "\n").replace("\n\n", " ")
        except ValueError:
            return ''

    @staticmethod
    def _extract_list(base_url):
        # Scrape an index page (chemicals, plants, herbs, smarts, animals) into a
        # list of dicts with 'url', 'name', 'other_names' and 'effects'.
        response = requests.get(base_url).text
        soup = BeautifulSoup(response, "lxml")
        table = soup.find('table', {'class': 'topic-chart-surround'})
        categories = table.find_all("tr", {'class': 'topic-surround'})[1:]
        fields = []
        for cat in categories:
            chem_data = {}
            name = cat.find("td", {'class': 'topic-name'})
            chem_data["url"] = base_url + name.find("a")["href"]
            chem_data["name"] = name.getText().strip().lower()
            chem_data["other_names"] = cat.find("td", {'class': 'topic-common'}).getText().strip().lower()
            chem_data["effects"] = cat.find("td", {'class': 'topic-desc'}).getText().strip().lower()
            fields.append(chem_data)
        return fields

    @staticmethod
    def get_experience(exp_id):
        base_url = 'https://erowid.org/experiences/exp.php'
        url = base_url + "?ID=" + str(exp_id)
        data = {"exp_id": exp_id, "url": url}
        try:
            response = requests.get(url).text
            experience = Erowid.extract_experience_text(response)

            soup = BeautifulSoup(response, "lxml")
            drug = soup.find('div', {'class': 'substance'}).getText().strip().lower().replace("/", ", ")
            experience_data = soup.find('table', {'class': 'footdata'}).getText().strip().lower().split("\n")
            data["drug"] = drug
            data["experience"] = experience
            data["year"] = experience_data[0].split("expid:")[0].replace("exp year: ", "").strip()
            data["gender"] = experience_data[1].replace("gender: ", "").strip()
            data["age"] = experience_data[2].replace("age at time of experience: ", "").strip()
            data["date"] = experience_data[3].replace("published: ", "").split("views:")[0].strip()
            data["dosage"] = []

            dosage_table = soup.find('table', {'class': 'dosechart'})
            ts = dosage_table.find_all("td", {'align': 'right'})
            ammount = dosage_table.find_all("td", {'class': 'dosechart-amount'})
            method = dosage_table.find_all("td", {'class': 'dosechart-method'})
            substance = dosage_table.find_all("td", {'class': 'dosechart-substance'})
            form = dosage_table.find_all("td", {'class': 'dosechart-form'})
            for i in range(len(ts)):
                dosage_data = {}
                dosage_data["time"] = ts[i].getText().lower().replace("dose:", "").strip()
                dosage_data["ammount"] = ammount[i].getText().strip().lower()
                dosage_data["method"] = method[i].getText().strip().lower()
                dosage_data["substance"] = substance[i].getText().strip().lower()
                dosage_data["form"] = form[i].getText().strip().lower()
                data["dosage"].append(dosage_data)
        except Exception:
            # Missing or malformed report pages abort the whole parse.
            return None
        return data

    @staticmethod
    def get_categories():
        # Section names are embedded as "<!-- Start ... -->" comments on the index page.
        base_url = 'https://erowid.org/experiences/exp_list.shtml'
        response = requests.get(base_url).text
        categories = []
        for sub in response.split("<!-- Start ")[1:]:
            sub = sub[:sub.find(" -->")]
            categories.append(sub)
        return categories

    @staticmethod
    def get_chemicals():
        base_url = 'https://erowid.org/chemicals/'
        return Erowid._extract_list(base_url)

    @staticmethod
    def get_plants():
        base_url = 'https://erowid.org/plants/'
        return Erowid._extract_list(base_url)

    @staticmethod
    def get_herbs():
        base_url = 'https://erowid.org/herbs/'
        return Erowid._extract_list(base_url)

    @staticmethod
    def get_smarts():
        base_url = 'https://erowid.org/smarts/'
        return Erowid._extract_list(base_url)

    @staticmethod
    def get_animals():
        base_url = 'https://erowid.org/animals/'
        return Erowid._extract_list(base_url)

    @staticmethod
    def parse_page(url):
        base_url = url
        if ".shtml" in base_url:
            base_url = "/".join(base_url.split("/")[:-1]) + "/"
        data = {"url": base_url}
        response = requests.get(url).text
        soup = BeautifulSoup(response, "lxml")
        data["name"] = soup.find('div', {'class': 'title-section'}).getText().strip().lower()
        picture = soup.find('div', {'class': "summary-card-topic-image"}).find("img")
        if picture:
            picture = base_url + picture["src"]
        else:
            picture = ""
        data["picture"] = picture
        data["other_names"] = [n.strip().lower() for n in soup.find('div', {'class': 'sum-common-name'}).getText().split(";")]
        data["description"] = soup.find('div', {'class': "sum-description"}).getText()
        info = soup.find('div', {'class': "summary-card-icon-surround"}).find_all("a")
        urls = {}
        for link in info:
            # use a separate name so the page url passed in is not shadowed
            info_url = base_url + link["href"]
            name = link.find("img")["alt"].strip().lower()
            urls[name] = info_url
        data["info"] = urls
        if "/chem" in url or "/pharms" in url or "/smarts" in url:
            data["chem_name"] = soup.find('div', {'class': "sum-chem-name"}).getText()
            data["effects"] = soup.find('div', {'class': "sum-effects"}).getText()
        elif "/animals" in url or "/plants" in url:
            animal_data = soup.find_all('div', {'class': "fgs-row"})
            data["family"] = animal_data[0].find('div', {'class': "family"}).getText()
            data["genus"] = animal_data[1].find('div', {'class': "genus"}).getText()
            data["species"] = animal_data[2].find('div', {'class': "species"}).getText()
            data["effects"] = soup.find('div', {'class': "sum-effects"}).getText()
        elif "/herbs" in url:
            animal_data = soup.find_all('div', {'class': "fgs-row"})
            data["family"] = animal_data[0].find('div', {'class': "family"}).getText()
            data["genus"] = animal_data[1].find('div', {'class': "genus"}).getText()
            data["species"] = animal_data[2].find('div', {'class': "species"}).getText()
            data["uses"] = soup.find('div', {'class': "sum-uses"}).getText()

        return data

    @staticmethod
    def random_experience():
        # Keep drawing random experience IDs until a parsable report is found.
        exp = None
        while exp is None:
            exp = Erowid.get_experience(random.randint(1, 111451))
        return exp
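The index helpers other than get_chemicals() are not exercised in example.py; a short sketch of their use, assuming the dict layout produced by _extract_list() above ('url', 'other_names', 'name', 'effects'):

from pyerowid import Erowid

# Each index helper returns the same dict layout as get_chemicals().
plants = Erowid.get_plants()
herbs = Erowid.get_herbs()
print(plants[0]["name"], plants[0]["url"])
print(len(herbs), "herbs indexed")

# get_categories() returns the section names scraped from the experience vault index.
print(Erowid.get_categories())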
13 setup.py Normal file
@@ -0,0 +1,13 @@
from setuptools import setup

setup(
    name='py_erowid',
    version='0.1',
    packages=['pyerowid'],
    url='https://github.com/JarbasAl/py_erowid',
    license='MIT',
    author='jarbasAI',
    author_email='jarbasai@mailfence.com',
    description='unofficial erowid api',
    # requests is imported by pyerowid/__init__.py and belongs here as well
    install_requires=["requests", "lxml", "bs4"]
)
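A quick smoke-test sketch (not part of the commit), assuming the package has been installed from this setup.py (for example with pip install . in the repository root) and is importable as pyerowid, as declared in packages=['pyerowid']:

from pyerowid import Erowid

# Confirm the public entry points defined in pyerowid/__init__.py are present.
for helper in ("get_experience", "random_experience", "get_categories",
               "get_chemicals", "get_plants", "get_herbs",
               "get_smarts", "get_animals", "parse_page"):
    assert hasattr(Erowid, helper), helper
print("pyerowid installed; Erowid API available")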