## News Ticker

News Ticker
Epstein didn't kill himself.
Epstein didn't kill himself.
Epstein didn't kill himself.
Epstein didn't kill himself.

## Sunday, August 7, 2022

### How to calculate I2C addresses on a Raspberry Pi using Python

Occasionally when programming on an RP2040 it's necessary to identify the port an I2C address is using. This helps:

import machine

# Scan the I2C bus on GP0 (SDA) / GP1 (SDA) and report every device
# address that acknowledges, in both decimal and hex.
sda = machine.Pin(0)
scl = machine.Pin(1)
i2c = machine.I2C(0, sda=sda, scl=scl, freq=400000)

addresses = i2c.scan()  # list of 7-bit addresses that ACKed
print(addresses, ' (decimal)')
# BUG FIX: hex() accepts a single int, not a list -- the original
# hex(i2c.scan()) raised TypeError. Convert each address individually.
print([hex(address) for address in addresses], ' (hex)')

## Saturday, August 6, 2022

### Calculating distances between grid coordinates

Calculating distances is more challenging than compass directions. There are three methods. One is the geometric method which is accurate enough but needs to be recalculated every 50 miles for real accuracy. Another is the Haversine method which is accurate to within about 10% and which was used for well over a hundred years. The best, however, is the Vincenty method which is accurate to within 0.5mm.

Here's code comparing all 3 methods...

from vincenty import vincenty

import math

# Python 3 program for the haversine formula
def haversine(lat1, lon1, lat2, lon2):
    """Return the great-circle distance in kilometres between two points.

    Args:
        lat1, lon1: first point's latitude/longitude in decimal degrees.
        lat2, lon2: second point's latitude/longitude in decimal degrees.
    """
    # distance between latitudes and longitudes, converted to radians
    dLat = (lat2 - lat1) * math.pi / 180.0
    dLon = (lon2 - lon1) * math.pi / 180.0

    lat1 = (lat1) * math.pi / 180.0
    lat2 = (lat2) * math.pi / 180.0

    # apply formulae
    a = (pow(math.sin(dLat / 2), 2) +
         pow(math.sin(dLon / 2), 2) *
         math.cos(lat1) * math.cos(lat2))
    c = 2 * math.asin(math.sqrt(a))
    # BUG FIX: the original computed c but never returned anything, so the
    # function yielded None and the caller's division crashed. Scale the
    # central angle by Earth's mean radius (km) and return it.
    rad = 6371
    return rad * c

# Driver code: compares haversine, a flat-earth approximation, and Vincenty
# for the Tower of London -> Arc de Triomphe route.
if __name__ == "__main__":
    # Blog's own sign convention: north positive / south negative,
    # WEST positive / east negative.
    lat1 = 51.5081
    lon1 = 0.0759
    lat2 = 48.8738
    lon2 = -2.2950

    # haversine() returns kilometres; /1.6 is the author's rough km->miles factor
    print("Haversine distance in miles ", haversine(lat1, lon1, lat2, lon2) / 1.6)
    # This code is contributed by ChitraNayal

    LocationOneV = lon1
    LocationTwoV = lon2
    LocationOneH = lat1
    LocationTwoH = lat2

    # My own version of distance created using the flat earth model
    # Distance calculation: 54.6 / 68.7 are approximate miles-per-degree factors
    DistanceV = (LocationOneV - LocationTwoV) * 54.6
    DistanceH = (LocationOneH - LocationTwoH) * 68.7
    DistanceCalc = (DistanceV * DistanceV) + (DistanceH * DistanceH)
    ActualDistance = math.sqrt(DistanceCalc)
    print("Distance in Miles using flat earth calculation ", ActualDistance)

    # BUG FIX: the vincenty library expects the standard east-positive
    # longitude convention. The Tower of London sits at 0.0759 degrees WEST,
    # so its longitude must be negative here (the Arc's +2.294946 E was
    # already correct); the original mixed the two conventions.
    TowerOfLondon = (51.5081, -0.0759)
    ArcDeTriomph = (48.873756, 2.294946)
    print("Vincenty miles ", vincenty(TowerOfLondon, ArcDeTriomph, miles=True))

This, of course, required the Vincenty library which is here:

import math

# WGS 84 ellipsoid parameters
a = 6378137  # semi-major axis, meters
f = 1 / 298.257223563  # flattening
b = 6356752.314245  # semi-minor axis, meters; b = (1 - f)a

MILES_PER_KILOMETER = 0.621371

MAX_ITERATIONS = 200
CONVERGENCE_THRESHOLD = 1e-12  # .000,000,000,001


def vincenty_inverse(point1, point2, miles=False):
    """
    Vincenty's formula (inverse method) to calculate the distance (in
    kilometers or miles) between two points on the surface of a spheroid

    Points are (latitude, longitude) tuples in decimal degrees.
    Returns the distance rounded to 6 decimal places, or None if the
    iteration fails to converge (nearly antipodal points).

    Doctests:
    >>> vincenty((0.0, 0.0), (0.0, 0.0))  # coincident points
    0.0
    >>> vincenty((0.0, 0.0), (0.0, 1.0))
    111.319491
    >>> vincenty((0.0, 0.0), (1.0, 0.0))
    110.574389
    >>> vincenty((0.0, 0.0), (0.5, 179.5))  # slow convergence
    19936.288579
    >>> vincenty((0.0, 0.0), (0.5, 179.7))  # failure to converge
    >>> boston = (42.3541165, -71.0693514)
    >>> newyork = (40.7791472, -73.9680804)
    >>> vincenty(boston, newyork)
    298.396057
    >>> vincenty(boston, newyork, miles=True)
    185.414657
    """
    # short-circuit coincident points
    # BUG FIX: the pasted version compared the whole tuples twice; the
    # published implementation compares latitude and longitude components.
    if point1[0] == point2[0] and point1[1] == point2[1]:
        return 0.0

    # BUG FIX: the pasted version passed whole tuples to math.radians
    # (TypeError) and never defined L (NameError on "Lambda = L").
    # Restored the latitude components and the longitude difference.
    U1 = math.atan((1 - f) * math.tan(math.radians(point1[0])))
    U2 = math.atan((1 - f) * math.tan(math.radians(point2[0])))
    L = math.radians(point2[1] - point1[1])
    Lambda = L

    sinU1 = math.sin(U1)
    cosU1 = math.cos(U1)
    sinU2 = math.sin(U2)
    cosU2 = math.cos(U2)

    for iteration in range(MAX_ITERATIONS):
        sinLambda = math.sin(Lambda)
        cosLambda = math.cos(Lambda)
        sinSigma = math.sqrt((cosU2 * sinLambda) ** 2 +
                             (cosU1 * sinU2 - sinU1 * cosU2 * cosLambda) ** 2)
        if sinSigma == 0:
            return 0.0  # coincident points
        cosSigma = sinU1 * sinU2 + cosU1 * cosU2 * cosLambda
        sigma = math.atan2(sinSigma, cosSigma)
        sinAlpha = cosU1 * cosU2 * sinLambda / sinSigma
        cosSqAlpha = 1 - sinAlpha ** 2
        try:
            cos2SigmaM = cosSigma - 2 * sinU1 * sinU2 / cosSqAlpha
        except ZeroDivisionError:
            cos2SigmaM = 0  # equatorial line
        C = f / 16 * cosSqAlpha * (4 + f * (4 - 3 * cosSqAlpha))
        LambdaPrev = Lambda
        Lambda = L + (1 - C) * f * sinAlpha * (sigma + C * sinSigma *
                                               (cos2SigmaM + C * cosSigma *
                                                (-1 + 2 * cos2SigmaM ** 2)))
        if abs(Lambda - LambdaPrev) < CONVERGENCE_THRESHOLD:
            break  # successful convergence
    else:
        return None  # failure to converge

    uSq = cosSqAlpha * (a ** 2 - b ** 2) / (b ** 2)
    A = 1 + uSq / 16384 * (4096 + uSq * (-768 + uSq * (320 - 175 * uSq)))
    B = uSq / 1024 * (256 + uSq * (-128 + uSq * (74 - 47 * uSq)))
    deltaSigma = B * sinSigma * (cos2SigmaM + B / 4 * (cosSigma *
                 (-1 + 2 * cos2SigmaM ** 2) - B / 6 * cos2SigmaM *
                 (-3 + 4 * sinSigma ** 2) * (-3 + 4 * cos2SigmaM ** 2)))
    s = b * A * (sigma - deltaSigma)
    s /= 1000  # meters to kilometers
    if miles:
        s *= MILES_PER_KILOMETER  # kilometers to miles
    return round(s, 6)


vincenty = vincenty_inverse

if __name__ == '__main__':
    import doctest
    doctest.testmod()

## Friday, August 5, 2022

### Direction given two grid coordinates

This is the code to calculate a compass bearing given two geographical coordinates.

#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Compass bearing between two geographic coordinates.

Created on Tue Jul 19 12:04:26 2022
"""
import math

DiameterOfEarth = 7917.5  # miles; NOTE(review): not used by the bearing calculation

# Assuming two fixed points - just for the sake of making life easy...
# North = positive values. South = negative values
# West = positive values, East = negative values
LocationOneV = 51.5081   # latitude
LocationOneH = 0.0759    # longitude
# Tower of London
LocationTwoV = 48.8738
LocationTwoH = -2.2950
# Arc de Triomphe

# BUG FIX: math.sin/math.cos expect radians, but the original passed degrees,
# and it printed 'Degrees' without ever defining it (NameError).
# Convert to radians first, then derive the bearing with atan2.
lat1 = math.radians(LocationOneV)
lat2 = math.radians(LocationTwoV)
dLon = math.radians(LocationOneH - LocationTwoH)

X = math.cos(lat2) * math.sin(dLon)
Y = (math.cos(lat1) * math.sin(lat2) - math.sin(lat1) *
     math.cos(lat2) * math.cos(dLon))

Degrees = math.degrees(math.atan2(X, Y))
print ("Degrees", Degrees)

That was pretty easy, wasn't it?

## Wednesday, August 3, 2022

### Navigation - how to do it with a drone

First off, I'll apologize for writing a dull and tedious-looking article. But now I have, let's get started.

We're going to imagine we have a drone of unspecified range - possibly enough to go half way around the world. To do that we will need to know several things.

• The altitude we will fly at - will it be a fixed altitude or a variable altitude?
• Where we left from.
• Where we're going.
• Any waypoints along the route.
The altitude is a tricky one - the highest mountain in the world is Mount Everest at 29,000 feet or thereabouts. That means that to be truly free of hitting ground based obstacles, a height of 30,000 feet is needed. Because planes use 30,000 feet perhaps a higher altitude so as not to bump into any. That takes us to 35,000 feet or possibly 40,000 feet. That airspace should be relatively free aside from America's U2 spy planes on the way up or down to 60,000 feet.

Being so high really isn't that practical for most purposes. Sure - if the drone is solar-powered or even nuclear powered and aimed at long duration flight for weeks or months then being high has great advantages. For the vast majority of flights, being low has its advantages. For this reason we're going to assume we're flying a drone to deliver aid into a hostile land where cargo planes get shot down.

For a flight like that, staying below radar is a pretty good idea. Thus our altitude will be around 80 feet above the ground. How do we calculate that we are 80 feet above the ground and what do we do about skyscrapers? That's all pretty straightforward. We don't need to stay consistently at 80 feet, we can go higher to skip over obstacles or even just go around them.

Calculating height off the ground has its challenges. An altimeter is good but only calculates height above sea level and is skewed by air pressure changes due to weather. GPS satellites also provide altitude calculations but as before - only altitude above sea level. The world is not level - there are mountains. Thus we need to resort to one of three known methods.
1. We can use radar to tell the distance from the ground - good stuff but radar can be picked up by hostiles and used for target acquisition.
2. We can use sonar - this is how the cruise missile used to do it. It works pretty well - send a wide sweep of sound and listen for the time difference between sending and receiving the pings. This does reduce maximum speed somewhat as the ping has to get back in order to judge altitude.
3. Laser - not a bad method though most laser rangefinders seem to be very limited in range. There's also the question as to how it'd work over water.
If we relied on GPS for altitude there would be an issue if it was jammed as is likely to happen over hostile territory. Even including GLONASS (Russian), BeiDou (Chinese) or Galileo (European) as well as GPS (American) won't help much because the signals are all weak and easy to overwhelm.

Where we left from is a pretty easy thing. That can be keyed in manually or read from GPS. As long as we have that coordinate we are well on the way to navigating. The next step is to know the destination. I'll skip waypoints as it's just a case of treating the last waypoint as the origin position and the next one as the destination.

Knowing where we're coming from and where we're going to are vitally important. We can ignore altitude for now though that will come back into play if we actually built a drone.

To get from the origin to the destination we need to know the direction in which to head. This is calculated using the great-circle forward-azimuth (initial bearing) formula. That gives us the bearing we need:
Formula: θ = atan2( sin Δλ ⋅ cos φ2 , cos φ1 ⋅ sin φ2 − sin φ1 ⋅ cos φ2 ⋅ cos Δλ )
where φ1,λ1 is the start point, φ2,λ2 the end point (Δλ is the difference in longitude)
At the start of the mission it's probably also a good idea to record the difference between magnetic north and true north so that in the event of satellites being unavailable, navigation can be carried out using heading and flight duration versus speed.

Knowing the distance between the start and the finish are very important too. That combined with a flight timer allows us to calculate in the event of no satellites, where to deliver the payload. This is calculated with the Vincenty method.

Between two nearly antipodal points, the iterative formula may fail to converge; this will occur when the first guess at λ as computed by the equation above is greater than π in absolute value.

That gives a distance accurate to within 0.5mm between any two points on the earth's surface.

I'll leave it there because your mind is probably reeling right now. At some future point I'll talk about the Kalman filter equations needed to turn an accelerometer from a gaming toy to a serious device that helps a drone fly level.

## Sunday, May 1, 2022

### Morse Code Generator

This is a Morse Code generator project using a Raspberry Pi Pico. Two breadboards are used - one has a 1k resistor, 3v buzzer and a PNP transistor (it doesn't matter which one). Some work still needs to be done on the code timing.

from machine import Pin, Timer

import utime

# Hardware setup: on-board LED on GPIO25, buzzer driver on GPIO15.
led = Pin(25, Pin.OUT)
buzzer = Pin(15, Pin.OUT)
utimer = Timer()  # NOTE(review): created but never used below -- confirm it is needed

#led.value(1)
#buzzer.value(1)
#utime.sleep(0.5)

# Start with both outputs off.
led.value(0)
buzzer.value(0)

# Morse timing, all derived from the basic dot length in seconds:
# dash = 3 dots, word gap = 7 dots, letter gap = 3 dots,
# gap between elements of one letter = 1 dot.
MasterTime = 0.05
Dash = (MasterTime * 3)
Dot = MasterTime
Space = (MasterTime * 7)
CharSpace = (MasterTime * 3)
InterCharSpace = MasterTime

def SendLetter(Letter: str) -> None:
    """Sound and flash one Morse character.

    Each '-' in *Letter* is sent as a dash, any other symbol as a dot,
    with an inter-element gap after every symbol.
    """
    led.value(0)
    # Robustness fix: the caller passes morse.get(char), which is None for
    # characters missing from the table; the original crashed on len(None).
    if not Letter:
        return
    for symbol in Letter:  # iterate symbols directly instead of indexing by counter
        # Dash and dot differ only in tone duration.
        duration = Dash if symbol == '-' else Dot
        led.value(1)
        buzzer.value(1)
        utime.sleep(duration)
        buzzer.value(0)
        led.value(0)
        utime.sleep(InterCharSpace)

#Morse Code
# International Morse table: lowercase character -> dot/dash string.
morse = {
    'a': '.-',
    'b': '-...',
    'c': '-.-.',
    'd': '-..',
    'e': '.',
    'f': '..-.',
    'g': '--.',
    # BUG FIX: 'h' was '--.' (a duplicate of 'g'); H is four dots.
    'h': '....',
    'i': '..',
    'j': '.---',
    'k': '-.-',
    'l': '.-..',
    'm': '--',
    'n': '-.',
    'o': '---',
    'p': '.--.',
    'q': '--.-',
    'r': '.-.',
    's': '...',
    't': '-',
    'u': '..-',
    'v': '...-',
    'w': '.--',
    'x': '-..-',
    'y': '-.--',
    'z': '--..',
    '0': '-----',
    '1': '.----',
    '2': '..---',
    '3': '...--',
    '4': '....-',
    '5': '.....',
    '6': '-....',
    '7': '--...',
    '8': '---..',
    '9': '----.',
    '&': '.-...',
    "'": '.----.',
    '@': '.--.-.',
    ')': '-.--.-',
    '(': '-.--.',
    ':': '---...',
    ',': '--..--',
    '=': '-...-',
    '!': '-.-.--',
    '.': '.-.-.-',
    '-': '-....-',
    '*': '-..-',
    '%': '------..-.-----',
    '+': '.-.-.',
    '"': '.-..-.',
    '?': '..--..',
    '/': '-..-.',
}

# Transmit the message one character at a time, echoing progress to the console.
char: str
MessageToSend: str = "In 1814, we took a little trip Along with Colonel Jackson down the mighty Mississip'"
MessageToSend = MessageToSend.lower()

for char in MessageToSend:
    if char == ' ':
        # Word boundary: no tone, just the longer pause.
        print('space')
        utime.sleep(Space)
    else:
        SendLetter(morse.get(char))
        utime.sleep(CharSpace)
        print(f'{char} {morse.get(char)}')

## Sunday, April 10, 2022

### A quickie still photo project

# Capture a single still image with no warm-up.
from picamera import PiCamera
from time import sleep
camera = PiCamera()
# The next three commented lines are the warm-up/preview variant the article
# compares against -- uncomment them (and stop_preview below) for a sharper image.
#camera.start_preview()
#sleep(5)
camera.capture('/home/pi/Desktop/imageWithNoPreview.jpg')
#camera.stop_preview()

With the camera preview and the sleep commented out, the image produced is significantly fuzzier than the image produced with the time delay and preview functioning, as evidenced by the two photos.

This poses a negligible problem for most people who are using Raspberry Pi just for experiments. For those that want to do it more seriously, setting the camera going a few seconds before it is used is a great advantage despite the increase in power consumption.

Camera used UV Pi camera
Pi used- 3B+
Software: Raspbian Buster, Thonny

## Saturday, March 19, 2022

### Motion-sensitive imaging using a Raspberry Pi

One of the interesting things one can do, in this case using a Raspberry Pi and an infrared camera, is to take motion-activated pictures. It's possible to do video and stills too, but for this example it's motion-sensitive images.

The GIF shown is made of 20 separate images as a hand was moved in front of the camera. This is definitely not a video. The code to do this is shown below and was written by somebody from MIT.

The original code has been modified to display on the screen, the filename used. Just to avoid confusion, the images are stored in a desktop folder called "NAS".

This does show rather the limitations of the Raspberry Pi in that between each photo it really does need 2 -3 seconds to read the brightness of a scene. Having said that, the Pi is an excellent little computer that can be used to develop proof of concept applications.

Something to have fun with.

#! /usr/bin/env python3

#
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.

import io
import os
import time
import picamera
import numpy as np

# Capture geometry: full sensor resolution for saved images, a lower
# resolution for the motion-detection compare passes.
widthH = 2592 # too slow for motion check, only use for the save sequence
heightH = 1944
width = 1440 # use lower resolution for motion check
height = 1080

threshold = 30 # how much must the color value (0-255) change to be considered a change
minPixelsChanged = width * height * 2 / 100 # % change  # how many pixels must change to begin a save sequence
print("minPixelsChanged=",minPixelsChanged) # debug

print ('Creating in-memory stream')
stream = io.BytesIO()  # reused buffer for every low-res capture
step = 1  # use this to toggle where the image gets saved
numImages = 1 # count number of images processed
captureCount = 0 # flag used to begin a sequence capture

# function to generate a sequence of 20 filenames for the picamera
# capture_sequence command to use
def filenames():
    """Yield 20 timestamped JPEG paths: NAS/YYYYMMDD/YYYYMMDD-HHMMSS-NN.jpg."""
    # Timestamp prefix is computed once so all 20 frames share one name stem.
    fileName = time.strftime("NAS/%Y%m%d/%Y%m%d-%H%M%S-", time.localtime())
    for frame in range(20):  # idiomatic range loop instead of a manual counter
        yield '%s%02d.jpg' % (fileName, frame)

# begin monitoring: alternate low-res captures into two buffers, compare
# successive frames, and save a high-res 20-image sequence when enough
# pixels change.
with picamera.PiCamera() as camera:
    time.sleep(1) # let camera warm up
    try:
        while threshold > 0:
            camera.resolution = (1440,1080) # use a smaller resolution for higher speed compare

            print ('Capture ' , numImages)
            # Alternate which buffer (data1/data2) receives the new frame so
            # the previous frame is always available for the comparison below.
            if step == 1:
                stream.seek(0)
                camera.capture(stream, 'rgba',True) # use video port for high speed
                data1 = np.fromstring(stream.getvalue(), dtype=np.uint8)
                step = 2
            else:
                stream.seek(0)
                camera.capture(stream, 'rgba',True)
                data2 = np.fromstring(stream.getvalue(), dtype=np.uint8)
                step = 1
            numImages = numImages + 1

            if numImages > 4:  # ignore first few images because if the camera is not quite ready it will register as motion right away
                # look for motion unless we are in save mode
                if captureCount <= 0:
                    print("Compare")
                    # not capturing, test for motion (very simplistic, but works good enough for my purposes)
                    # NOTE(review): data1 - data2 on uint8 arrays wraps around
                    # before np.abs is applied -- confirm this is intended.
                    data3 = np.abs(data1 - data2)  # get difference between 2 successive images
                    numTriggers = np.count_nonzero(data3 > threshold) / 4 / threshold #there are 4 times the number of pixels due to rgba

                    print("Trigger cnt=",numTriggers)

                    if numTriggers > minPixelsChanged:
                        captureCount = 1 # capture ? sequences in a row
                        # make sure directory exists for today
                        d = time.strftime("NAS/%Y%m%d") #unfortunately this saves as UTC time instead of local, will fix it later sorry
                        if not os.path.exists(d):
                            os.makedirs(d)

            if captureCount > 0:
                # in capture mode, save an image in hi res
                camera.resolution = (widthH,heightH)
                dtFileStr = time.strftime("NAS/%Y%m%d/%Y%m%d-%H%M%S-00.jpg",time.localtime()) # once again, UTC time instead of local time
                print("Saving sequence ",dtFileStr)
                # save full resolution images to the NAS
                camera.capture_sequence(filenames(),'jpeg',use_video_port=True, quality=92)
                captureCount = captureCount-1
                print(filenames())

    finally:
        # Always release the camera, even on Ctrl-C or an exception.
        camera.close()
        print ('Program Terminated')