Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adiciona spider para GO #144

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions web/spiders/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

from .spider_ce import Covid19CESpider
from .spider_es import Covid19ESSpider
from .spider_go import Covid19GOSpider
from .spider_pe import Covid19PESpider
from .spider_pr import Covid19PRSpider
from .spider_rn import Covid19RNSpider
Expand All @@ -14,6 +15,7 @@
SPIDERS = [
Covid19CESpider,
Covid19ESSpider,
Covid19GOSpider,
Covid19PESpider,
Covid19PRSpider,
Covid19RNSpider,
Expand Down
78 changes: 78 additions & 0 deletions web/spiders/spider_go.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,78 @@
import io
from itertools import groupby
from collections import defaultdict
from datetime import datetime

import rows

from .base import BaseCovid19Spider

class YMDDateField(rows.fields.DateField):
    """Date field for compact ``YYYYMMDD`` strings (e.g. ``"20200401"``),
    the format used in the GO health-department CSVs."""

    # strptime format applied by rows.fields.DateField when deserializing.
    INPUT_FORMAT = "%Y%m%d"


class Covid19GOSpider(BaseCovid19Spider):
    """Spider for COVID-19 data of Goiás (GO).

    Downloads two CSV datasets from the state health department — one row
    per confirmed case and one row per confirmed death — counts rows per
    city, and reports per-city and state-level totals on close.
    """

    name = "GO"
    start_urls = [
        "http://datasets.saude.go.gov.br/coronavirus/obitos_confirmados.csv",
        "http://datasets.saude.go.gov.br/coronavirus/casos_confirmados.csv",
    ]

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # city name -> {"confirmed": int, "deaths": int}; each key is filled
        # by parse() for the corresponding CSV and consumed in spider_closed().
        self.cases = defaultdict(dict)

    def parse(self, response):
        """Count rows per city for one CSV (cases or deaths, chosen by URL)."""
        table = rows.import_from_csv(
            io.BytesIO(response.body),
            encoding=response.encoding,
            force_types={"data_notificacao": YMDDateField},
        )
        table = list(table)

        # FIXME: make sure the REAL last date is used, since it may differ
        # between obitos_confirmados.csv and casos_confirmados.csv
        last_date = max(row.data_notificacao for row in table)
        self.add_report(date=last_date, url=response.url)

        row_key = lambda row: row.municipio
        # groupby only groups consecutive equal keys, so sort by the same key.
        table.sort(key=row_key)

        for city, city_rows in groupby(table, key=row_key):
            count = sum(1 for _ in city_rows)  # one CSV row == one case/death
            if "casos_confirmados.csv" in response.url:
                self.cases[city]["confirmed"] = count
            elif "obitos_confirmados.csv" in response.url:
                self.cases[city]["deaths"] = count

    def spider_closed(self):
        """Aggregate per-city counts, emit city cases and the state total."""
        total_confirmed = total_deaths = 0
        imported_confirmed = imported_deaths = 0

        for city, city_data in self.cases.items():
            # A city may appear in only one of the two CSVs, so both metrics
            # must default to 0 (the original raised KeyError when a city had
            # deaths but no confirmed cases recorded).
            confirmed = city_data.get("confirmed", 0)
            deaths = city_data.get("deaths", 0)

            try:
                self.get_city_id_from_name(city)
            except KeyError:
                # Unknown/unmapped city name: bucket into the
                # "Importados/Indefinidos" pseudo-city below.
                imported_confirmed += confirmed
                imported_deaths += deaths
            else:
                self.add_city_case(city=city, confirmed=confirmed, deaths=deaths)

            total_confirmed += confirmed
            total_deaths += deaths

        if imported_confirmed == imported_deaths == 0:
            imported_confirmed = imported_deaths = None

        self.add_city_case(
            city="Importados/Indefinidos",
            confirmed=imported_confirmed,
            deaths=imported_deaths,
        )

        self.add_state_case(confirmed=total_confirmed, deaths=total_deaths)

        # BUG FIX: super() already binds the instance — the original passed
        # `self` explicitly (`super().spider_closed(self)`), which would raise
        # TypeError for a one-argument signature.
        super().spider_closed()