Commit: initial upload

berthayes committed Apr 8, 2021
0 parents commit e8fd431

Showing 17 changed files with 1,088 additions and 0 deletions.
48 changes: 48 additions & 0 deletions CTA_bustracker.py
@@ -0,0 +1,48 @@
import requests
import json

# Uses the CTA Bus Tracker API (documentation available here):
# https://www.transitchicago.com/assets/1/6/cta_Bus_Tracker_API_Developer_Guide_and_Documentation_20160929.pdf

# Given a list of Route Designators ('rt' below), return Vehicle IDs.
# The JSON response holds the most recent status for each vehicle.

# Take that JSON response and send it to the Confluent Kafka REST Proxy.

# TODO: wrap this in a loop so it runs every minute or so (check the AUP first);
# see the batched sketch after this file.

# Confluent REST Proxy values
rest_proxy_host = "ec2-52-14-116-180.us-east-2.compute.amazonaws.com"
topic = "bustest"
rest_headers = {'Content-Type': 'application/vnd.kafka.json.v2+json', 'Accept': 'application/vnd.kafka.v2+json'}
rest_url = "http://" + rest_proxy_host + ":8082/topics/" + topic

# CTA Bus Tracker API values
api_key = 'HL76TdXUt6YZGmTKSziz3qXyT'
getvehicles_url = 'http://ctabustracker.com/bustime/api/v2/getvehicles'

# Format the API request and parse the response
vehicle_params = {'key': api_key, 'format': 'json', 'rt': 'X9,11,12,J14,15,18,19,20,21,22', 'tmres': 's'}
r_vehicles = requests.get(getvehicles_url, params=vehicle_params)
# each JSON object in the response is the latest status for one vehicle ID (bus)
response_dict = r_vehicles.json()
vehicle_dict = response_dict['bustime-response']
list_of_vids = vehicle_dict['vehicle']


for vid in list_of_vids:
    # each vid is a dict
    list_of_records = []
    kafka_record = {}
    kafka_record['value'] = vid
    # use the vehicle ID - vid - as the key for each record
    kafka_record['key'] = vid["vid"]
    list_of_records.append(kafka_record)
    send_data = {}
    send_data['records'] = list_of_records
    send_json = json.dumps(send_data)
    print(send_json)
    # POST to the REST API
    kpost = requests.post(rest_url, headers=rest_headers, data=send_json)
    print(kpost.text)
    # time.sleep(10)
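
The loop above issues one POST per vehicle. Since the REST Proxy v2 topics endpoint accepts a list of records per request, the TODO could be closed with a batched poll loop. A minimal sketch, not part of this commit; the 60-second interval and the batching are assumptions to be checked against the CTA AUP:

import time

while True:
    r_vehicles = requests.get(getvehicles_url, params=vehicle_params)
    vehicles = r_vehicles.json()['bustime-response']['vehicle']
    # one record per vehicle, all delivered in a single REST Proxy request
    records = [{'key': v['vid'], 'value': v} for v in vehicles]
    send_json = json.dumps({'records': records})
    kpost = requests.post(rest_url, headers=rest_headers, data=send_json)
    print(kpost.text)
    time.sleep(60)  # assumed poll interval; verify against the AUP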
2 changes: 2 additions & 0 deletions README.md
@@ -0,0 +1,2 @@
# iot
A turnkey environment for sending IoT data to Confluent Cloud
134 changes: 134 additions & 0 deletions all.yml
@@ -0,0 +1,134 @@
---
- name: Kafka Provisioning
  hosts: all
  remote_user: root
  gather_facts: false
  tags: base_install
  tasks:
    - name: apt-get update and upgrade
      apt:
        update_cache: yes
        upgrade: full
        force_apt_get: yes
    - name: install java and docker
      apt:
        name: openjdk-11-jdk, jq, docker, docker-compose, python3, python3-pip
        state: latest
        force_apt_get: yes
    - name: Installing Python Modules
      pip:
        name:
          - ruamel.yaml
          - yq
          - psutil
    - name: Adding user ubuntu to Docker group
      ansible.builtin.user:
        name: ubuntu
        append: true
        groups: docker
- name: Configure Confluent REST Proxy
  hosts: rest_proxy
  remote_user: root
  gather_facts: false
  tags: kafka_rest
  tasks:
    - name: Set the hostname to rest-proxy
      ansible.builtin.hostname:
        name: rest-proxy
    - name: Copying create_rest_proxy_docker_compose.py
      copy:
        src: create_rest_proxy_docker_compose.py
        dest: /home/ubuntu/create_rest_proxy_docker_compose.py
    - name: Copying ccloud-kafka-rest.properties
      copy:
        src: ccloud-kafka-rest.properties
        dest: /home/ubuntu/ccloud-kafka-rest.properties
    - name: Creating rest-docker-compose.yml file
      script: create_rest_proxy_docker_compose.py
      args:
        executable: python3
    - name: Starting Docker
      shell:
        cmd: docker-compose -f rest-docker-compose.yml up -d
- name: Configure MQTT Source Connector
  hosts: kafka_connect_mqtt
  remote_user: root
  gather_facts: false
  tags: kafka_connect
  tasks:
    - name: Set the hostname to mqtt-connect
      ansible.builtin.hostname:
        name: mqtt-connect
    - name: Copying create_mqtt_connect_docker_compose_dot_yaml.py
      copy:
        src: create_mqtt_connect_docker_compose_dot_yaml.py
        dest: /home/ubuntu/create_mqtt_connect_docker_compose_dot_yaml.py
    - name: Copying my-connect-distributed.properties
      copy:
        src: my-connect-distributed.properties
        dest: /home/ubuntu/my-connect-distributed.properties
    - name: Creating docker-compose.yml file
      script: create_mqtt_connect_docker_compose_dot_yaml.py
      args:
        executable: python3
    - name: Starting Docker
      shell:
        cmd: docker-compose -f docker-compose.yml up -d
- name: Configure the Prometheus Sink Connector
  hosts: kafka_connect_prometheus
  remote_user: root
  gather_facts: false
  tags: kafka_connect_prometheus
  tasks:
    - name: Set the hostname to prometheus-connect
      ansible.builtin.hostname:
        name: prometheus-connect
    - name: Copying my-connect-distributed.properties to prometheus connect
      copy:
        src: my-connect-distributed.properties
        dest: my-connect-distributed.properties
    - name: Copying create_prometheus_connect_docker_compose_dot_yaml.py
      copy:
        src: create_prometheus_connect_docker_compose_dot_yaml.py
        dest: /home/ubuntu/create_prometheus_connect_docker_compose_dot_yaml.py
    - name: Creating docker-compose.yml file
      script: create_prometheus_connect_docker_compose_dot_yaml.py
      args:
        executable: python3
    - name: Starting Docker
      shell:
        cmd: docker-compose -f docker-compose.yml up -d
- name: Configure the Prometheus Server
  hosts: prometheus_server
  remote_user: root
  gather_facts: false
  tags: prometheus_server
  tasks:
    - name: Set the hostname to prometheus-server
      ansible.builtin.hostname:
        name: prometheus-server
    - name: Copying hosts.yml
      copy:
        src: hosts.yml
        dest: /home/ubuntu/hosts.yml
    - name: Copying prom_server_default.yml
      copy:
        src: prom_server_default.yml
        dest: prom_server_default.yml
    - name: Copying make_prometheus_server_config.py
      copy:
        src: make_prometheus_server_config.py
        dest: make_prometheus_server_config.py
    - name: Creating prometheus.yml
      script: make_prometheus_server_config.py
      args:
        executable: python3
    - name: installing prometheus
      apt:
        name: prometheus
        state: latest
        force_apt_get: yes
    - name: moving prometheus.yml config file
      command: mv /home/ubuntu/prometheus.yml /etc/prometheus/prometheus.yml
    - name: starting prometheus server
      ansible.builtin.command:
        cmd: /usr/bin/prometheus --config.file="/etc/prometheus/prometheus.yml"
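
The create_*_docker_compose.py scripts the plays above copy and run are part of this commit but not shown on this page. As a rough sketch of the pattern they suggest - ruamel.yaml (installed by the first play) emitting a compose file for docker-compose to consume - the following is hypothetical; the image, port, and mount path are assumptions, not the commit's actual generator:

# Hypothetical sketch of a compose-file generator; the real
# create_rest_proxy_docker_compose.py in this commit may differ.
from ruamel.yaml import YAML

compose = {
    'version': '3',
    'services': {
        'rest-proxy': {
            'image': 'confluentinc/cp-kafka-rest:latest',  # assumed image
            'ports': ['8082:8082'],  # port used by CTA_bustracker.py above
            'volumes': [
                # mount the properties file copied over by the playbook (assumed path)
                './ccloud-kafka-rest.properties:/etc/kafka-rest/kafka-rest.properties',
            ],
        }
    },
}

yaml = YAML()
with open('rest-docker-compose.yml', 'w') as f:
    yaml.dump(compose, f)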

46 changes: 46 additions & 0 deletions cpu2mqtt.py
@@ -0,0 +1,46 @@
# cpu2mqtt - a simple script to read cpu metrics using
# the psutil library, then send them to an mqtt broker

# TODO: pull some of these configs from a config file

import paho.mqtt.client as paho
#import paho.mqtt.publish as publish
import socket
import psutil
import time
import json

hostname = socket.gethostname()

def on_connect(client, userdata, flags, rc):
    print("CONNACK received with code " + str(rc))


def on_publish(client, userdata, mid):
    print("mid: " + str(mid))

client = paho.Client(client_id=hostname, clean_session=True, userdata=None, protocol=paho.MQTTv31)
client.on_connect = on_connect
client.connect("broker.hivemq.com", 1883)


client.on_publish = on_publish
client.loop_start()

while True:
    # cpu_times_percent gives the percentage of time spent in each CPU state over the interval
    #(rc, mid) = client.publish("encyclopedia/temperature", str(temperature), qos=1)
    cpu_times_percent = psutil.cpu_times_percent(interval=1, percpu=False)
    cpu_stats = {}
    cpu_stats["timestamp"] = time.time()
    cpu_stats["user"] = cpu_times_percent.user
    cpu_stats["nice"] = cpu_times_percent.nice
    cpu_stats["idle"] = cpu_times_percent.idle
    cpu_stats["system"] = cpu_times_percent.system
    print(cpu_stats)
    payload = json.dumps(cpu_stats)
    print(payload)
    mqtt_topic = hostname + "/cpu_stats"
    print(mqtt_topic)
    (rc, mid) = client.publish(mqtt_topic, payload=payload)
    time.sleep(30)
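
To confirm the metrics are arriving, a quick subscriber against the same public broker can be run from any machine. A minimal sketch using the same paho library; the '+' wildcard topic is an assumption covering whatever hostname the publisher uses:

import paho.mqtt.client as paho

def on_message(client, userdata, msg):
    # msg.payload is the JSON dict published by cpu2mqtt.py
    print(msg.topic + " " + msg.payload.decode())

client = paho.Client()
client.on_message = on_message
client.connect("broker.hivemq.com", 1883)
# subscribe to every host's cpu_stats topic; '+' matches one topic level
client.subscribe("+/cpu_stats")
client.loop_forever()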