
Copied code from theory repository
lange50 committed Feb 9, 2024
1 parent cba0dcd commit 2fbdde8
Showing 61 changed files with 32,089 additions and 0 deletions.
12 changes: 12 additions & 0 deletions .gitignore
@@ -0,0 +1,12 @@
# ignore
*.mp4
*.dll
*.npz
*.pyc
*.pyc
*.pdf
*.svg
*.npz
*.gds
*.h5
*.png
3 changes: 3 additions & 0 deletions figures/.vscode/settings.json
@@ -0,0 +1,3 @@
{
"python.analysis.typeCheckingMode": "basic"
}
1,205 changes: 1,205 additions & 0 deletions figures/Autocorrelation/autocorrelation.ipynb

478 changes: 478 additions & 0 deletions figures/Calculations/Calculations.ipynb

716 changes: 716 additions & 0 deletions figures/Collective.py

584 changes: 584 additions & 0 deletions figures/Extinction/Extinction-1.ipynb

1,081 changes: 1,081 additions & 0 deletions figures/Extinction/Extinction-4.ipynb

1,870 changes: 1,870 additions & 0 deletions figures/Extinction/Extinction-4_extractor.ipynb

279 changes: 279 additions & 0 deletions figures/FrequencyShifting/FSM646.ipynb

143 changes: 143 additions & 0 deletions figures/FrequencyShifting/FSM652.ipynb

1,336 changes: 1,336 additions & 0 deletions figures/FrequencyShifting/FSM653.ipynb

74 changes: 74 additions & 0 deletions figures/FrequencyShifting/FSM653.py
@@ -0,0 +1,74 @@
import sys, os
import glob
sys.path.append('D:\\GitHub\\QuantumOpticsTheory\\CollectiveDecay\\Python\\CollectiveDecay')

import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pyplot import cm
from matplotlib.colors import ListedColormap, LinearSegmentedColormap
from scipy.optimize import curve_fit
from scipy.interpolate import interp1d

import Transformations

save_path = r'C:\Users\chris\Dropbox\Apps\Overleaf\Superradiance and subradiance in light shifted organic molecules\Figures\FrequencyShifting' + '\\'

### Finding files ###
data_path = r'D:\QOExperimentData\FrequencyShiftMeasurement\measurement0653'
probes = sorted(glob.glob(data_path + "\\probe*"))


# White out the lowest colormap entries so near-zero counts render blank.
my_cmap = cm.get_cmap('gist_heat_r', 512)(np.linspace(0, 1, 512))
my_cmap[0:16, :] = np.array([1, 1, 1, 1])
my_cmap = ListedColormap(my_cmap)


### Tuning trace plot ###
def get_scan_data(probe, rolling_avg=100):
    """Load one probe's resonance scan, resample, and smooth it."""
    res_path = probe + '\\RESONANCES.npz'
    if not os.path.exists(res_path):
        return None, None
    data = np.load(res_path, allow_pickle=True)
    f = data['frequencies']
    c = data['counts']
    f = Transformations.match_arrays(f, c)  # resample frequencies onto the counts grid
    f = Transformations.rolling_average(f, rolling_avg)
    c = Transformations.rolling_average(c, rolling_avg)
    return f, c


interp_fun_list = []
for i, p in enumerate(probes):
    if i in range(0, 500):  # only the first 500 probes
        f, c = get_scan_data(p)
        if f is None:
            continue
        interp_fun_list.append(interp1d(f, c, bounds_error=False, fill_value=0))

c = 3e8  # speed of light in nm * GHz, so c / wavelength_nm is a frequency in GHz
# Earlier frequency windows, kept for reference; the final assignment wins.
# fmin, fmax = 382620, 382680
# fmin, fmax = 382400, 382800
# fmin, fmax = 382460, 382610
fmin, fmax = c / 784.4, c / 784

# Evaluate every probe on a grid that is uniform in wavelength, not frequency.
f_list = np.linspace(fmin, fmax, 1000)
l_list = np.linspace(c / fmax, c / fmin, 1000)
f_list = c / l_list

c_array = []
for interp_fun in np.flip(interp_fun_list):  # reversed here, reversed back by the flip below
    c_array.append(interp_fun(f_list))
c_array = np.flip(np.transpose(c_array))
c_array /= np.max(c_array)
c_array[np.where(c_array > 0.1)] = 1  # saturate everything above 10% of the maximum

# extent = [0, c_array.shape[1], fmin - np.mean([fmin, fmax]), fmax - np.mean([fmin, fmax])]
extent = [0, c_array.shape[1], c / fmax, c / fmin]

fig, ax = plt.subplots(figsize=(175 / 72 * 4 / 3, 175 / 72))
# fig, ax = plt.subplots()
plt.rcParams.update({'font.size': 8, 'pdf.fonttype': 42, "font.family": "Arial"})

plt.imshow(c_array, aspect='auto', extent=extent, cmap=my_cmap)
plt.xlabel("Probe number")
plt.ylabel("Wavelength (nm)")
plt.yticks([784, 784.1, 784.2, 784.3, 784.4])

plt.tight_layout()
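
The script above reduces to one idea: interpolate every probe's (frequency, counts) scan onto a single wavelength grid, stack the resampled scans into a 2D array, and render it as an image. A minimal sketch of that pipeline on synthetic data (the drifting resonance, its 5 GHz width, and the 0.1 threshold are illustrative placeholders, not measured values):

import numpy as np
import matplotlib.pyplot as plt
from scipy.interpolate import interp1d

rng = np.random.default_rng(0)
c = 3e8  # nm * GHz, as in the script above

# Fake scans: each "probe" carries a resonance that drifts with probe number.
n_probes, n_points = 50, 400
l_grid = np.linspace(784.0, 784.4, n_points)  # nm, uniform in wavelength
f_grid = c / l_grid                           # GHz

interp_funs = []
for k in range(n_probes):
    f = np.linspace(c / 784.5, c / 783.9, 300)
    center = c / 784.4 + k * (c / 784.0 - c / 784.4) / n_probes
    counts = np.exp(-((f - center) / 5.0) ** 2) + 0.02 * rng.random(300)
    interp_funs.append(interp1d(f, counts, bounds_error=False, fill_value=0))

# Evaluate all probes on the shared grid; rows are wavelengths, columns probes.
image = np.transpose([fun(f_grid) for fun in interp_funs])
image /= image.max()
image[image > 0.1] = 1  # binarize above 10% of the maximum, as above

plt.imshow(image, aspect='auto', origin='lower',
           extent=[0, n_probes, 784.0, 784.4], cmap='gist_heat_r')
plt.xlabel("Probe number")
plt.ylabel("Wavelength (nm)")
plt.show()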
261 changes: 261 additions & 0 deletions figures/FrequencyShifting/FSM682.ipynb

41 changes: 41 additions & 0 deletions figures/FrequencyShifting/Transformations.py
@@ -0,0 +1,41 @@
# -*- coding: utf-8 -*-
"""
Created on Sat May 20 09:21:44 2023
@author: chris
"""
import numpy as np


def match_arrays(x, y):
    """Resample x onto a grid of length len(y)."""
    return np.interp(np.linspace(0, len(x), len(y)), range(0, len(x) - 1), x[0:-1])


def resize(x, n):
    """Resample x onto a grid of length n."""
    return np.interp(np.linspace(0, len(x), n), range(0, len(x) - 1), x[0:-1])


def fill_evenly(data, locations, size):
    """Fill an array of the given size with data values, switching value at
    each fractional location in [0, 1]."""
    assert len(data) == len(locations), 'Data and locations must be of the same length'
    assert size > 0, 'Size should be a positive integer'
    assert locations[-1] <= 1, 'Last location should not exceed 1'
    locations[0] = 0

    idx = [int(np.floor(loc * size)) for loc in locations]
    output = np.array([data[0]] * size)
    for i, _ in enumerate(idx):
        if i + 1 < len(idx):
            output[idx[i]:idx[i + 1]] = data[i]
        else:
            output[idx[i]:output.size] = data[i]

    return output


def rolling_average(x, n):
    """Edge-padded rolling mean that preserves the input length."""
    pad_width = (n // 2, (n - 1) // 2)  # asymmetric pad keeps len(output) == len(x) for even n too
    x = np.pad(x, pad_width, mode='edge')
    window = np.ones(n) / n
    return np.convolve(x, window, 'valid')


if __name__ == "__main__":
    output = fill_evenly([1, 5, 7], [0, 0.5, 0.8], 10)
    print(output)
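
A short usage sketch for the remaining helpers (this assumes Transformations.py is importable from the working directory; the array values are illustrative):

import numpy as np
from Transformations import match_arrays, rolling_average

x = np.array([0.0, 1.0, 2.0, 3.0])
y = np.zeros(8)

# Resample x onto y's length-8 grid; values past the last kept sample clamp.
print(match_arrays(x, y))         # 8 values rising from 0.0 and clamping at 2.0

# Length-preserving, edge-padded smoothing.
noisy = np.array([1.0, 9.0, 1.0, 9.0, 1.0, 9.0])
print(rolling_average(noisy, 3))  # 6 values, each the mean of a 3-sample window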
106 changes: 106 additions & 0 deletions figures/FrequencyShifting/fig4a.ipynb

Binary file added figures/LevelStructure/LevelStructure.eps
1,088 changes: 1,088 additions & 0 deletions figures/Lifetime/Lifetime.ipynb

291 changes: 291 additions & 0 deletions figures/Linewidth/AllTwoPhotonPeaks.ipynb
