first
68 .gitignore vendored Normal file
@@ -0,0 +1,68 @@
# Python
__pycache__/
*.py[cod]
*$py.class
*.so
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# Virtual Environment
venv/
env/
ENV/
.env
.venv

# IDE
.idea/
.vscode/
*.swp
*.swo
.project
.pydevproject
.settings/

# Jupyter Notebook
.ipynb_checkpoints
*.ipynb

# Data and Models
*.pkl
*.h5
*.model
*.pt
*.pth
*.bin
encodings.pkl

# Logs and databases
*.log
*.sqlite
*.db

# OS generated files
.DS_Store
.DS_Store?
._*
.Spotlight-V100
.Trashes
ehthumbs.db
Thumbs.db

# Project specific
*.npy
*.npz
337 load_tinystories.py Normal file
@@ -0,0 +1,337 @@
from datasets import load_dataset
import numpy as np
from collections import Counter
import re
import pickle
import os
import matplotlib.pyplot as plt
from tqdm import tqdm


class BiDict:
    """
    Bidirectional dictionary for word-to-vector and vector-to-word mappings
    """
    def __init__(self):
        self.word_to_vec = {}
        self.vec_to_word = {}

    def __setitem__(self, word, vector):
        self.word_to_vec[word] = vector
        self.vec_to_word[vector] = word

    def __getitem__(self, key):
        # Try word_to_vec first, then vec_to_word
        return self.word_to_vec.get(key) or self.vec_to_word.get(key)

    def __len__(self):
        return len(self.word_to_vec)

    def items(self):
        return self.word_to_vec.items()

    def values(self):
        return self.word_to_vec.values()
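
# Illustrative usage (not part of the original file): both lookup directions
# work because __setitem__ writes to both internal dicts.
#
#     vocab = BiDict()
#     vocab['cat'] = '101'
#     vocab['cat']   # -> '101'
#     vocab['101']   # -> 'cat'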


def load_tinystories():
    """
    Load the TinyStories dataset from Hugging Face.
    Returns the dataset object containing train and validation splits.
    """
    ds = load_dataset("roneneldan/TinyStories")
    return ds


def tokenize_with_punctuation(text):
    """
    Split text into words and punctuation marks as separate tokens.
    Whitespace separates words but is dropped from the output; punctuation
    marks are kept as standalone tokens.
    """
    # Define pattern to split on word boundaries but keep punctuation as tokens
    # Using raw string to properly escape special characters
    pattern = r'([.,!?;:"\'()\[\]{}]|\s+|[a-zA-Z0-9]+)'
    tokens = re.findall(pattern, text.lower())
    # Filter out empty strings and pure whitespace, but keep punctuation
    return [token for token in tokens if token.strip() or token in '.,!?;:"\'()[]{}']
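
# Illustrative example (assumed input): whitespace separates tokens but is
# dropped, while punctuation survives as standalone tokens.
#
#     tokenize_with_punctuation("Hello, world!")
#     # -> ['hello', ',', 'world', '!']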


def get_vocabulary(stories, N=12):
    """
    Create vocabulary from the given stories.
    Returns a bidirectional dictionary mapping words and vectors.
    """
    # Get all unique tokens across all stories
    all_tokens = set()
    for story in stories:
        tokens = tokenize_with_punctuation(story)
        all_tokens.update(tokens)

    # Sort tokens for consistent encoding
    unique_tokens = sorted(all_tokens)

    # Create unique N-bit vectors
    num_tokens = len(unique_tokens)
    if num_tokens > 2**N:
        raise ValueError(f"Vocabulary size ({num_tokens}) exceeds {N}-bit capacity ({2**N})")

    # Generate all possible N-bit numbers
    all_possible = list(range(2**N))
    np.random.shuffle(all_possible)

    # Create unique random binary numbers for each token
    token_to_vector = BiDict()
    for i, token in enumerate(unique_tokens):
        binary = format(all_possible[i], f'0{N}b')
        token_to_vector[token] = binary

    return token_to_vector
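
# Encoding sketch (values chosen for illustration): each token gets a distinct
# zero-padded N-bit string, e.g. with N=12,
#
#     format(5, '012b')   # -> '000000000101'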


def save_encodings(vocab, encoded_stories, stories, filename='encodings.pkl'):
    """Save the encodings and vocabulary to a pickle file"""
    data = {
        'vocabulary': vocab,
        'encoded_stories': encoded_stories,
        'original_stories': stories
    }
    with open(filename, 'wb') as f:
        pickle.dump(data, f)


def load_encodings(filename='encodings.pkl'):
    """Load encodings from pickle file if it exists"""
    if os.path.exists(filename):
        with open(filename, 'rb') as f:
            data = pickle.load(f)
        return data['vocabulary'], data['encoded_stories'], data['original_stories']
    return None, None, None


def encode_stories(n_stories=30, force_encode=False, N=12):
    """
    Encode the first n stories into N-bit vectors.
    If encodings exist and force_encode is False, load from file.
    Otherwise, create new encodings and save them.
    """
    if not force_encode:
        vocab, encoded_stories, stories = load_encodings()
        if vocab is not None:
            print("Loaded existing encodings from file")
            return vocab, encoded_stories, stories

    ds = load_tinystories()
    stories = [ds['train'][i]['text'] for i in range(n_stories)]
    print(stories)
    # Get vocabulary mapping with specified N
    vocab = get_vocabulary(stories, N=N)

    # Encode stories
    encoded_stories = []
    for story in stories:
        tokens = tokenize_with_punctuation(story)
        encoded_tokens = [vocab[token] for token in tokens]
        encoded_stories.append(encoded_tokens)

    # Save the encodings
    save_encodings(vocab, encoded_stories, stories)
    print("Created and saved new encodings")

    return vocab, encoded_stories, stories


def get_word_sequences(encoded_stories, M=100, N=12):
    """
    Get sequences of M consecutive words from encoded stories.
    Each word is N bits long.
    """
    M_N_sequences = []

    # Process each story with progress bar
    for story in tqdm(encoded_stories, desc="Generating sequences"):
        # Only process if story has enough words
        if len(story) >= M:
            # Get groups of M words, shifting by 1 word each time
            for i in range(len(story) - M + 1):
                word_group = story[i:i + M]
                # Convert words to bit array
                bits = []
                for word in word_group:
                    bits.extend([int(bit) for bit in word])
                vector = np.array(bits).reshape(M * N, 1)
                M_N_sequences.append(vector)

    return np.array(M_N_sequences)
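
# Shape sketch: with the defaults M=100, N=12, a story of length L >= M
# contributes (L - M + 1) sliding windows, each flattened to a column vector
# of shape (M * N, 1) = (1200, 1).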


def sequence_to_words(sequence, N=12):
    """
    Convert a sequence vector back into a list of N-bit words
    """
    # Convert sequence to flat list of bits
    bits = [str(int(bit[0])) for bit in sequence]
    # Split into N-bit chunks
    words = [''.join(bits[i:i + N]) for i in range(0, len(bits), N)]
    return words


def calculate_energy(sequences):
    """
    Calculate the energy of each sequence and accumulate the coupling matrix.
    """
    energies = []
    hamiltonian = 0
    for seq in sequences:
        energy = -seq.dot(seq.T) / 2
        hamiltonian += energy
        energies.append(energy)
    # eigvalsh: the accumulated matrix is symmetric, and this avoids the
    # complex-typed output that eigvals can return
    plt.semilogy(-np.linalg.eigvalsh(hamiltonian), ".")
    plt.show()
    return energies, hamiltonian
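
# Note on the sign convention (an observation, not a behavior change):
# hamiltonian is W = sum_k -(1/2) s_k s_k^T over the stored bit patterns s_k.
# Combined with the extra -1/2 applied in retrieve_sequences, the retrieval
# energy works out to +(1/4) sum_k (x . s_k)^2, whereas the standard Hopfield
# convention uses W = sum_k s_k s_k^T with E(x) = -(1/2) x^T W x, which is
# minimized (not maximized) when x matches a stored pattern.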


def retrieve_sequences(sequences, partial_sequence, vocab, W, M=10, N=12, temperature=1.0):
    """
    Retrieve the next word by sampling Boltzmann probabilities built on the
    Ising Hamiltonian. Uses associative memory to retrieve the last word of
    the sequence. (`sequences` is currently unused; the couplings come in
    via W.)
    """
    # Convert partial sequence to vector
    partial_vec = np.array([int(bit) for bit in partial_sequence]).reshape(-1, 1)

    # Get all possible words from vocabulary
    possible_words = list(vocab.values())

    # Calculate energies for all possible words
    word_energies = []

    for word in possible_words:
        # Create complete sequence with this word
        complete_sequence = partial_sequence + word
        if len(complete_sequence) == M * N:  # Ensure correct length
            complete_vec = np.array([int(bit) for bit in complete_sequence]).reshape(M * N, 1)

            # Calculate energy using Ising Hamiltonian
            energy_matrix = complete_vec.T.dot(W).dot(complete_vec)
            energy = -0.5 * float(energy_matrix[0, 0])

            word_energies.append((word, energy))

    # Sort by energy
    word_energies.sort(key=lambda x: x[1])

    # Normalize energies to prevent overflow
    energies = np.array([e[1] for e in word_energies])
    energies = energies - np.min(energies)  # Shift to make minimum energy 0
    energies = energies / np.max(energies) if np.max(energies) > 0 else energies  # Scale to [0,1]

    # Calculate probabilities with normalized energies
    probabilities = np.exp(-energies / temperature)
    probabilities = probabilities / np.sum(probabilities)

    # Check for valid probabilities
    if np.any(np.isnan(probabilities)):
        # Fallback to uniform distribution if numerical issues occur
        probabilities = np.ones(len(word_energies)) / len(word_energies)

    selected_idx = np.random.choice(len(word_energies), p=probabilities)
    best_word, min_energy = word_energies[selected_idx]

    # Find the word corresponding to the binary vector
    for word, vector in vocab.items():
        if vector == best_word:
            return word, best_word, min_energy
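
# Sampling sketch: after the min-max normalization the energies lie in [0, 1],
# so the Boltzmann weights exp(-E_i / T) stay within [exp(-1/T), 1] and cannot
# overflow. Higher T flattens the distribution toward uniform; T -> 0
# approaches greedy selection of the lowest-energy word.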


def predict_sequence(initial_sequence, vocab, sequences, W, D=10, M=100, N=12, temperature=1.0):
    """
    Predict D words iteratively by sliding the window.
    """
    current_tokens = initial_sequence.copy()
    predictions = []
    energies = []

    # Add progress bar for predictions
    for _ in tqdm(range(D), desc="Predicting words"):
        # Convert current tokens to binary sequence
        partial_sequence = ""
        for token in current_tokens:
            partial_sequence += vocab[token]

        # Predict next word
        predicted_word, _, energy = retrieve_sequences(
            sequences,
            partial_sequence,
            vocab,
            W=W,
            M=M,
            N=N,
            temperature=temperature
        )

        predictions.append(predicted_word)
        energies.append(energy)

        # Slide window: remove first token and add predicted word
        current_tokens = current_tokens[1:] + [predicted_word]

    return predictions, energies
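
# Window sketch (illustrative): with M=10 the caller supplies M-1 = 9 seed
# tokens; each step encodes them into a 9*N-bit prefix, samples the N-bit word
# that completes the 10*N-bit window, then drops the oldest token:
#
#     [t1 .. t9] -> predict t10 -> [t2 .. t10] -> predict t11 -> ...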


if __name__ == "__main__":
    N = 13  # Bits per word
    M = 10  # Sequence length in words
    D = 3  # Number of words to predict
    temperature = 1.0  # Sampling temperature (higher = more diverse)

    print("Loading and encoding stories...")
    # Force new encoding to ensure consistency
    vocab, encoded_stories, original_stories = encode_stories(force_encode=True, N=N)

    print("\nGenerating training sequences...")
    # Get sequences for training
    sequences = get_word_sequences(encoded_stories=encoded_stories, M=M, N=N)
    print(f"Number of training sequences: {len(sequences)}")
    print(f"Sequence shape: {sequences[0].shape if len(sequences) > 0 else 'No sequences found'}")

    # Get initial sequence from first story
    story_tokens = tokenize_with_punctuation(original_stories[0])
    _, W = calculate_energy(sequences)

    # Make sure we have enough tokens for the M-1-token seed window
    if len(story_tokens) >= M - 1:
        initial_tokens = story_tokens[:M - 1]

        # Predict next D words
        predicted_words, energies = predict_sequence(
            initial_tokens,
            vocab,
            sequences,
            W=W,
            D=D,
            M=M,
            N=N,
            temperature=temperature
        )

        # Print results
        print("\nOriginal sequence:")
        print(" ".join(initial_tokens[-10:]))  # Last 10 tokens of initial sequence
        print("\nPredicted sequence:")
        print(" ".join(predicted_words))
        print("\nEnergies:")
        print(energies)
        print("\nActual next words:")
        print(" ".join(story_tokens[M - 1:M - 1 + D]))  # Next D actual words
    else:
        print(f"Story too short. Needs at least {M-1} tokens, but has {len(story_tokens)}")

    # # Print example
    # print(f"Total vocabulary size: {len(vocab)}")
    # print("\nExample encoding for first story:")
    # print("Original:", original_stories[0])
    # print("First few tokens and their encodings:")
    # tokens = tokenize_with_punctuation(original_stories[0])
    # for token, encoding in zip(tokens[:10], encoded_stories[0][:10]):
    #     print(f"'{token}' -> {encoding}")

    # # Get statistics about vector usage
    # total_unique_in_vocab = len(vocab)
    # total_unique_used = len(set([vec for story in encoded_stories for vec in story]))
    # total_vectors = sum(len(story) for story in encoded_stories)

    # print(f"\nTotal unique vectors in vocabulary: {total_unique_in_vocab}")
    # print(f"Total unique vectors used in stories: {total_unique_used}")
    # print(f"Total word occurrences: {total_vectors}")
    # print(encoded_stories[0])

    # print(sequences)
    # plt.imshow(energies[0])
    # plt.show()
61 predict_story.py Normal file
@@ -0,0 +1,61 @@
from load_tinystories import (
    tokenize_with_punctuation,
    load_encodings,
    predict_sequence,
    get_word_sequences,
    calculate_energy,
    BiDict
)


def get_user_sentence():
    """Get a sentence from user and validate it has enough words"""
    while True:
        sentence = input("Enter a sentence (at least 99 words): ")
        tokens = tokenize_with_punctuation(sentence)
        if len(tokens) >= 99:
            return tokens
        print(f"Sentence too short. Got {len(tokens)} tokens, need 99.")


def main():
    # Load saved encodings
    vocab, encoded_stories, original_stories = load_encodings()
    if vocab is None:
        print("No saved encodings found. Please run load_tinystories.py first.")
        return

    # Constants
    M = 100  # Sequence length
    N = 13  # Bits per word
    D = 10  # Number of words to predict
    temperature = 1.0

    print("Loading training sequences...")
    sequences = get_word_sequences(encoded_stories=encoded_stories, M=M, N=N)
    print(f"Loaded {len(sequences)} training sequences")

    # Build the coupling matrix from the training sequences;
    # predict_sequence requires it as W
    _, W = calculate_energy(sequences)

    # Get sentence from user
    print("\nI'll help you continue your story!")
    initial_tokens = get_user_sentence()

    # Predict next words
    print("\nPredicting next words...")
    predicted_words, energies = predict_sequence(
        initial_tokens[:M - 1],  # Use first M-1 tokens
        vocab,
        sequences,
        W=W,
        D=D,
        M=M,
        N=N,
        temperature=temperature
    )

    # Print results
    print("\nYour input ended with:")
    print(" ".join(initial_tokens[-10:]))  # Last 10 tokens
    print("\nPredicted continuation:")
    print(" ".join(predicted_words))
    print("\nEnergies of predictions:")
    for i, (word, energy) in enumerate(zip(predicted_words, energies)):
        print(f"Word {i+1} ('{word}'): {energy:.4f}")


if __name__ == "__main__":
    main()
332 sentences.txt Normal file
@@ -0,0 +1,332 @@
“You can’t, all right? You can’t help me. No one can help me. My wife is dead, and the police think
I killed her.” His voice is rising, spots of colour appear on his cheeks. “They think I killed her.”
“But . . . Kamal Abdic . . .”
The chair crashes against the kitchen wall with such force that one of the legs splinters away. I
jump back in fright, but Scott has barely moved. His hands are back at his sides, balled into fists. I can
see the veins under his skin.
“Kamal Abdic,” he says, teeth gritted, “is no longer a suspect.” His tone is even, but he is
struggling to restrain himself. I can feel the anger vibrating off him. I want to get to the front door,
but he is in my way, blocking my path, blocking out what little light there was in the room.
“Do you know what he’s been saying?” he asks, turning away from me to pick up the chair. Of
course I don’t, I think, but I realize once again that he’s not really talking to me. “Kamal’s got all sorts
of stories. Kamal says that Megan was unhappy, that I was a jealous, controlling husband, a—what
was the word?—an emotional abuser.” He spits the words out in disgust. “Kamal says Megan was
afraid of me.”
“But he’s—”
“He isn’t the only one. That friend of hers, Tara—she says that Megan asked her to cover for her
sometimes, that Megan wanted her to lie to me about where she was, what she was doing.”
He places the chair back at the table and it falls over. I take a step towards the hallway, and he looks
at me then. “I am a guilty man,” he says, his face a twist of anguish. “I am as good as convicted.”
He kicks the broken chair aside and sits down on one of the three remaining good ones. I hover,
unsure. Stick or twist? He starts to talk again, his voice so soft I can barely hear him. “Her phone was
in her pocket,” he says. I take a step closer to him. “There was a message on it from me. The last thing
I ever said to her, the last words she ever read, were Go to hell you lying bitch.”
His chin on his chest, his shoulders start to shake. I am close enough to touch him. I raise my hand
and, trembling, put my fingers lightly on the back of his neck. He doesn’t shrug me away.
“I’m sorry,” I say, and I mean it, because although I’m shocked to hear the words, to imagine that
he could speak to her like that, I know what it is to love someone and to say the most terrible things to
them, in anger or anguish. “A text message,” I say. “It’s not enough. If that’s all they have . . .”
“It’s not, though, is it?” He straightens up then, shrugging my hand away from him. I walk back
around the table and sit down opposite him. He doesn’t look up at me. “I have a motive. I didn’t
behave . . . I didn’t react the right way when she walked out. I didn’t panic soon enough. I didn’t call
her soon enough.” He gives a bitter laugh. “And there is a pattern of abusive behaviour, according to
Kamal Abdic.” It’s then that he looks up at me, that he sees me, that a light comes on. Hope. “You . . .
you can talk to the police. You can tell them that it’s a lie, that he’s lying. You can at least give another
side of the story, tell them that I loved her, that we were happy.”
I can feel panic rising in my chest. He thinks I can help him. He is pinning his hopes on me and all I
have for him is a lie, a bloody lie.
“They won’t believe me,” I say weakly. “They don’t believe me. I’m an unreliable witness.”
The silence between us swells and fills the room; a fly buzzes angrily against the French doors.
Scott picks at the dried blood on his cheek, I can hear his nails scraping against his skin. I push my
chair back, the legs scraping on the tiles, and he looks up.
“You were here,” he says, as though the piece of information I gave him fifteen minutes ago is
only now sinking in. “You were in Witney the night Megan went missing?”
I can barely hear him above the blood thudding in my ears. I nod.
“Why didn’t you tell the police that?” he asks. I can see the muscle tic in his jaw.
“I did. I did tell them that. But I didn’t have . . . I didn’t see anything. I don’t remember anything.”
He gets to his feet, walks over to the French doors and pulls back the curtain. The sunshine is
momentarily blinding. Scott stands with his back to me, his arms folded.
“You were drunk,” he says matter-of-factly. “But you must remember something. You must—that’s
why you keep coming back here, isn’t it?” He turns around to face me. “That’s it, isn’t it? Why you
keep contacting me. You know something.” He’s saying this as though it’s fact: not a question, not an
accusation, not a theory. “Did you see his car?” he asks. “Think. Blue Vauxhall Corsa. Did you see it?”
I shake my head and he throws his arms up in frustration. “Don’t just dismiss it. Really think. What did
you see? You saw Anna Watson, but that doesn’t mean anything. You saw—come on! Who did you
see?”
Blinking into the sunlight, I try desperately to piece together what I saw, but nothing comes.
Nothing real, nothing helpful. Nothing I could say out loud. I was in an argument. Or perhaps I
witnessed an argument. I stumbled on the station steps, a man with red hair helped me up—I think that
he was kind to me, although now he makes me feel afraid. I know that I had a cut on my head, another
on my lip, bruises on my arms. I think I remember being in the underpass. It was dark. I was
frightened, confused. I heard voices. I heard someone call Megan’s name. No, that was a dream. That
wasn’t real. I remember blood. Blood on my head, blood on my hands. I remember Anna. I don’t
remember Tom. I don’t remember Kamal or Scott or Megan.
He is watching me, waiting for me to say something, to offer him some crumb of comfort, but I
have none.
“That night,” he says, “that’s the key time.” He sits back down at the table, closer to me now, his
back to the window. There is a sheen of sweat on his forehead and his upper lip, and he shivers as
though with fever. “That’s when it happened. They think that’s when it happened. They can’t be
sure . . .” He tails off. “They can’t be sure. Because of the condition . . . of the body.” He takes a deep
breath. “But they think it was that night. Or soon after.” He’s back on autopilot, speaking to the room,
not to me. I listen in silence as he tells the room that the cause of death was head trauma, her skull was
fractured in several places. No sexual assault, or at least none that they could confirm, because of her
condition. Her condition, which was ruined.
When he comes back to himself, back to me, there is fear in his eyes, desperation.
“If you remember anything,” he says, “you have to help me. Please, try to remember, Rachel.” The
sound of my name on his lips makes my stomach flip, and I feel wretched.
On the train, on the way home, I think about what he said, and I wonder if it’s true. Is the reason that
I can’t let go of this trapped inside my head? Is there some knowledge I’m desperate to impart? I
know that I feel something for him, something I can’t name and shouldn’t feel. But is it more than
that? If there’s something in my head, then maybe someone can help me get it out. Someone like a
psychiatrist. A therapist. Someone like Kamal Abdic.
TUESDAY, AUGUST 6, 2013
MORNING
I’ve barely slept. All night, I lay awake thinking about it, turning it over and over in my mind. Is this
stupid, reckless, pointless? Is it dangerous? I don’t know what I’m doing. I made an appointment
yesterday morning to see Dr. Kamal Abdic. I rang his surgery and spoke to a receptionist, asked for
him by name. I might have been imagining it, but I thought she sounded surprised. She said he could
see me today at four thirty. So soon? My heart battering my ribs, my mouth dry, I said that would be
fine. The session costs £75. That £300 from my mother is not going to last very long.
Ever since I made the appointment, I haven’t been able to think of anything else. I’m afraid, but I’m
excited, too. I can’t deny that there’s a part of me that finds the idea of meeting Kamal thrilling.
Because all this started with him: a glimpse of him and my life changed course, veered off the tracks.
The moment I saw him kiss Megan, everything changed.
And I need to see him. I need to do something, because the police are only interested in Scott. They
had him in for questioning again yesterday. They won’t confirm it, of course, but there’s footage on
the Internet: Scott, walking into the police station, his mother at his side. His tie was too tight, he
looked strangled.
Everyone speculates. The newspapers say that the police are being more circumspect, that they
cannot afford to make another hasty arrest. There is talk of a botched investigation, suggestions that a
change in personnel may be required. On the Internet, the talk about Scott is horrible, the theories
wild, disgusting. There are screen grabs of him giving his first tearful appeal for Megan’s return, and
next to them are pictures of killers who had also appeared on television, sobbing, seemingly
distraught at the fate of their loved ones. It’s horrific, inhuman. I can only pray that he never looks at
this stuff. It would break his heart.
So, stupid and reckless I may be, but I am going to see Kamal Abdic, because unlike all the
speculators, I have seen Scott. I’ve been close enough to touch him, I know what he is, and he isn’t a
murderer.
EVENING
My legs are still trembling as I climb the steps to Corly station. I’ve been shaking like this for hours,
it must be the adrenaline, my heart just won’t slow down. The train is packed—no chance of a seat
here, it’s not like getting on at Euston, so I have to stand, midway through a carriage. It’s like a
sweatbox. I’m trying to breathe slowly, my eyes cast down to my feet. I’m just trying to get a handle
on what I’m feeling.
Exultation, fear, confusion and guilt. Mostly guilt.
It wasn’t what I expected.
By the time I got to the practice, I’d worked myself up into a state of complete and utter terror: I
was convinced that he was going to look at me and somehow know that I knew, that he was going to
view me as a threat. I was afraid that I would say the wrong thing, that somehow I wouldn’t be able to
stop myself from saying Megan’s name. Then I walked into a doctor’s waiting room, boring and
bland, and spoke to a middle-aged receptionist, who took my details without really looking at me. I
sat down and picked up a copy of Vogue and flicked through it with trembling fingers, trying to focus
my mind on the task ahead while at the same time attempting to look unremarkably bored, just like
any other patient.
There were two others in there: a twentysomething man reading something on his phone and an
older woman who stared glumly at her feet, not once looking up, even when her name was called by
the receptionist. She just got up and shuffled off, she knew where she was going. I waited there for
five minutes, ten. I could feel my breathing getting shallow. The waiting room was warm and airless,
and I felt as though I couldn’t get enough oxygen into my lungs. I worried that I might faint.
Then a door flew open and a man came out, and before I’d even had time to see him properly, I
knew that it was him. I knew the way I knew that he wasn’t Scott the first time I saw him, when he was
nothing but a shadow moving towards her—just an impression of tallness, of loose, languid
movement. He held out his hand to me.
“Ms. Watson?”
I raised my eyes to meet his and felt a jolt of electricity all the way down my spine. I put my hand
into his. It was warm and dry and huge, enveloping the whole of mine.
“Please,” he said, indicating for me to follow him into his office, and I did, feeling sick, dizzy all
the way. I was walking in her footsteps. She did all this. She sat opposite him in the chair he told me to
sit in, he probably folded his hands just below his chin the way he did this afternoon, he probably
nodded at her in the same way, saying, “OK, what would you like to talk to me about today?”
Everything about him was warm: his hand, when I shook it; his eyes; the tone of his voice. I
searched his face for clues, for signs of the vicious brute who smashed Megan’s head open, for a
glimpse of the traumatized refugee who had lost his family. I couldn’t see any. And for a while, I
forgot myself. I forgot to be afraid of him. I was sitting there and I wasn’t panicking any longer. I
swallowed hard and tried to remember what I had to say, and I said it. I told him that for four years I’d
had problems with alcohol, that my drinking had cost me my marriage and my job, it was costing me
my health, obviously, and I feared it might cost me my sanity, too.
“I don’t remember things,” I said. “I black out and I can’t remember where I’ve been or what I’ve
done. Sometimes I wonder if I’ve done or said terrible things, and I can’t remember. And if . . . if
someone tells me something I’ve done, it doesn’t even feel like me. It doesn’t feel like it was me who
was doing that thing. And it’s so hard to feel responsible for something you don’t remember. So I
never feel bad enough. I feel bad, but the thing that I’ve done—it’s removed from me. It’s like it
doesn’t belong to me.”
All this came out, all this truth, I just spilled it in front of him in the first few minutes of being in
his presence. I was so ready to say it, I’d been waiting to say it to someone. But it shouldn’t have been
him. He listened, his clear amber eyes on mine, his hands folded, motionless. He didn’t look around
the room or make notes. He listened. And eventually he nodded slightly and said, “You want to take
responsibility for what you have done, and you find it difficult to do that, to feel fully accountable if
you cannot remember it?”
“Yes, that’s it, that’s exactly it.”
“So, how do we take responsibility? You can apologize—and even if you cannot remember
committing your transgression, that doesn’t mean that your apology, and the sentiment behind your
apology, is not sincere.”
“But I want to feel it. I want to feel . . . worse.”
It’s an odd thing to say, but I think this all the time. I don’t feel bad enough. I know what I’m
responsible for, I know all the terrible things I’ve done, even if I don’t remember the details—but I
feel distanced from those actions. I feel them at one remove.
“You think that you should feel worse than you do? That you don’t feel bad enough for your
mistakes?”
“Yes.”
Kamal shook his head. “Rachel, you have told me that you lost your marriage, you lost your job—
do you not think this is punishment enough?”
I shook my head.
He leaned back a little in his chair. “I think perhaps you are being rather hard on yourself.”
“I’m not.”
“All right. OK. Can we go back a bit? To when the problem started. You said it was . . . four years
ago? Can you tell me about that time?”
I resisted. I wasn’t completely lulled by the warmth of his voice, by the softness of his eyes. I
wasn’t completely hopeless. I wasn’t going to start telling him the whole truth. I wasn’t going to tell
him how I longed for a baby. I told him that my marriage broke down, that I was depressed, and that
I’d always been a drinker, but that things just got out of hand.
“Your marriage broke down, so . . . you left your husband, or he left you, or . . . you left each
other?”
“He had an affair,” I said. “He met another woman and fell in love with her.” He nodded, waiting
for me to go on. “It wasn’t his fault, though. It was my fault.”
“Why do you say that?”
“Well, the drinking started before . . .”
“So your husband’s affair was not the trigger?”
“No, I’d already started, my drinking drove him away, it was why he stopped . . .”
Kamal waited, he didn’t prompt me to go on, he just let me sit there, waiting for me to say the
words out loud.
“Why he stopped loving me,” I said.
I hate myself for crying in front of him. I don’t understand why I couldn’t keep my guard up. I
shouldn’t have talked about real things, I should have gone in there with some totally made-up
problems, some imaginary persona. I should have been better prepared.
I hate myself for looking at him and believing, for a moment, that he felt for me. Because he
looked at me as though he did, not as though he pitied me, but as though he understood me, as though
I was someone he wanted to help.
“So then, Rachel, the drinking started before the breakdown of your marriage. Do you think you
can point to an underlying cause? I mean, not everyone can. For some people, there is just a general
slide into a depressive or an addicted state. Was there something specific for you? A bereavement,
some other loss?”
I shook my head, shrugged. I wasn’t going to tell him that. I will not tell him that.
He waited for a few moments and then glanced quickly at the clock on his desk.
“We will pick up next time, perhaps?” he said, and then he smiled and I went cold.
Everything about him is warm—his hands, his eyes, his voice—everything but the smile. You can
see the killer in him when he shows his teeth. My stomach a hard ball, my pulse skyrocketing again, I
left his office without shaking his outstretched hand. I couldn’t stand to touch him.
I understand, I do. I can see what Megan saw in him, and it’s not just that he’s arrestingly handsome.
He’s also calm and reassuring, he exudes a patient kindness. Someone innocent or trusting or simply
troubled might not see through all that, might not see that under all that calm he’s a wolf. I understand
that. For almost an hour, I was drawn in. I let myself open up to him. I forgot who he was. I betrayed
Scott, and I betrayed Megan, and I feel guilty about that.
But most of all, I feel guilty because I want to go back.
WEDNESDAY, AUGUST 7, 2013
MORNING
I had it again, the dream where I’ve done something wrong, where everyone is against me, sides with
Tom. Where I can’t explain, or even apologize, because I don’t know what the thing is. In the space
between dreaming and wakefulness, I think of a real argument, long ago—four years ago—after our
first and only round of IVF failed, when I wanted to try again. Tom told me we didn’t have the money,
and I didn’t question that. I knew we didn’t—we’d taken on a big mortgage, he had some debts left
over from a bad business deal his father had coaxed him into pursuing—I just had to deal with it. I just
had to hope that one day we would have the money, and in the meantime I had to bite back the tears
that came, hot and fast, every time I saw a stranger with a bump, every time I heard someone else’s
happy news.
It was a couple of months after we’d found out that the IVF had failed that he told me about the trip.
Vegas, for four nights, to watch the big fight and let off some steam. Just him and a couple of his
mates from the old days, people I had never met. It cost a fortune, I know, because I saw the booking
receipt for the flight and the room in his email inbox. I’ve no idea what the boxing tickets cost, but I
can’t imagine they were cheap. It wasn’t enough to pay for a round of IVF, but it would have been a
start. We had a horrible fight about it. I don’t remember the details because I’d been drinking all
afternoon, working myself up to confront him about it, so when I did it was in the worst possible way.
I remember his coldness the next day, his refusal to speak about it. I remember him telling me, in flat
disappointed tones, what I’d done and said, how I’d smashed our framed wedding photograph, how
I’d screamed at him for being so selfish, how I’d called him a useless husband, a failure. I remember
how much I hated myself that day.
I was wrong, of course I was, to say those things to him, but what comes to me now is that I wasn’t
unreasonable to be angry. I had every right to be angry, didn’t I? We were trying to have a baby—
shouldn’t we have been prepared to make sacrifices? I would have cut off a limb if it meant I could
have had a child. Couldn’t he have forgone a weekend in Vegas?
I lie in bed for a bit, thinking about that, and then I get up and decide to go for a walk, because if I
don’t do something I’m going to want to go round to the corner shop. I haven’t had a drink since
Sunday and I can feel the fight going on within me, the longing for a little buzz, the urge to get out of
my head, smashing up against the vague feeling that something has been accomplished and that it
would be a shame to throw it away now.
Ashbury isn’t really a good place to walk, it’s just shops and suburbs, there isn’t even a decent
park. I head off through the middle of town, which isn’t so bad when there’s no one else around. The
trick is to fool yourself into thinking that you’re headed somewhere: just pick a spot and set off
towards it. I chose the church at the top of Pleasance Road, which is about two miles from Cathy’s flat.
I’ve been to an AA meeting there. I didn’t go to the local one because I didn’t want to bump into
anyone I might see on the street, in the supermarket, on the train.
When I get to the church, I turn around and walk back, striding purposefully towards home, a
woman with things to do, somewhere to go. Normal. I watch the people I pass—the two men running,
backpacks on, training for the marathon, the young woman in a black skirt and white trainers, heels in
her bag, on her way to work—and I wonder what they’re hiding. Are they moving to stop drinking,
running to stand still? Are they thinking about the killer they met yesterday, the one they’re planning
to see again?
I’m not normal.
I’m almost home when I see it. I’ve been lost in thought, thinking about what these sessions with
Kamal are actually supposed to achieve: am I really planning to rifle through his desk drawers if he
happens to leave the room? To try to trap him into saying something revealing, to lead him into
dangerous territory? Chances are he’s a lot cleverer than I am; chances are he’ll see me coming. After
all, he knows his name has been in the papers—he must be alert to the possibility of people trying to
get stories on him or information from him.
This is what I’m thinking about, head down, eyes on the pavement, as I pass the little Londis shop
on the right and try not to look at it because it raises possibilities, but out of the corner of my eye I see
her name. I look up and it’s there, in huge letters on the front of a tabloid newspaper: WAS MEGAN A
CHILD KILLER?
ANNA
• • •
WEDNESDAY, AUGUST 7, 2013
MORNING
I was with the National Childbirth Trust girls at Starbucks when it happened. We were sitting in our
usual spot by the window, the kids were spreading Lego all over the floor, Beth was trying (yet again)
to persuade me to join her book club, and then Diane showed up. She had this look on her face, the
self-importance of someone who is about to deliver a piece of particularly juicy gossip. She could
barely contain herself as she struggled to get her double buggy through the door.
“Anna,” she said, her face grave, “have you seen this?” She held up a newspaper with the headline
WAS MEGAN A CHILD KILLER? I was speechless. I just stared at it and, ridiculously, burst into tears. Evie
was horrified. She howled. It was awful.
I went to the loos to clean myself (and Evie) up, and when I got back they were all speaking in
hushed tones. Diane glanced slyly up at me and asked, “Are you all right, sweetie?” She was enjoying
it, I could tell.
I had to leave then, I couldn’t stay. They were all being terribly concerned, saying how awful it
must be for me, but I could see it on their faces: thinly disguised disapproval. How could you entrust
your child to that monster? You must be the worst mother in the world.
I tried to call Tom on the way home, but his phone just went straight to voice mail. I left him a
message to ring me back as soon as possible—I tried to keep my voice light and even, but I was
trembling and my legs felt shaky, unsteady.
I didn’t buy the paper, but I couldn’t resist reading the story online. It all sounds rather vague.
“Sources close to the Hipwell investigation” claim an allegation has been made that Megan “may have
been involved in the unlawful killing of her own child” ten years ago. The “sources” also speculate
that this could be a motive for her murder. The detective in charge of the whole investigation—
Gaskill, the one who came to speak to us after she went missing—made no comment.
Tom rang me back—he was in between meetings, he couldn’t come home. He tried to placate me,
he made all the right noises, he told me it was probably a load of rubbish anyway. “You know you
can’t believe half the stuff they print in the newspapers.” I didn’t make too much of a fuss, because he
was the one who suggested she come and help out with Evie in the first place. He must be feeling
horrible.
And he’s right. It may not even be true. But who would come up with a story like that? Why would
you make up a thing like that? And I can’t help thinking, I knew. I always knew there was something
off about that woman. At first I just thought she was a bit immature, but it was more than that, she was
sort of absent. Self-involved. I’m not going to lie—I’m glad she’s gone. Good riddance.
EVENING
I’m upstairs, in the bedroom. Tom’s watching TV with Evie. We’re not talking. It’s my fault. He
walked in the door and I just went for him.
I was building up to it all day. I couldn’t help it, couldn’t hide from it, she was everywhere I looked.
Here, in my house, holding my child, feeding her, changing her, playing with her while I was taking a
nap. I kept thinking of all the times I left Evie alone with her, and it made me sick.
And then the paranoia came, that feeling I’ve had almost all the time I’ve lived in this house, of
being watched. At first, I used to put it down to the trains. All those faceless bodies staring out of the
windows, staring right across at us, it gave me the creeps. It was one of the many reasons why I didn’t
want to move in here in the first place, but Tom wouldn’t leave. He said we’d lose money on the sale.
At first the trains, and then Rachel. Rachel watching us, turning up on the street, calling us up all the
time. And then even Megan, when she was here with Evie: I always felt she had half an eye on me, as
though she were assessing me, assessing my parenting, judging me for not being able to cope on my
own. Ridiculous, I know. Then I think about that day when Rachel came to the house and took Evie,
and my whole body goes cold and I think, I’m not being ridiculous at all.
So by the time Tom came home, I was spoiling for a fight. I issued an ultimatum: we have to leave,
there’s no way I can stay in this house, on this road, knowing everything that has gone on here.
Everywhere I look now I have to see not only Rachel, but Megan, too. I have to think about everything
she touched. It’s too much. I said I didn’t care whether we got a good price for the house or not.
“You will care when we’re forced to live in a much worse place, when we can’t make our
mortgage payments,” he said, perfectly reasonably. I asked whether he couldn’t ask his parents to help
out—they have plenty of money—but he said he wouldn’t ask them, that he’d never ask them for
anything again, and he got angry then, said he didn’t want to talk about it anymore. It’s because of
how his parents treated him when he left Rachel for me. I shouldn’t even have mentioned them, it
always pisses him off.
But I can’t help it. I feel desperate, because now every time I close my eyes I see her, sitting there at
the kitchen table with Evie on her lap. She’d be playing with her and smiling and chattering, but it
never seemed real, it never seemed as if she really wanted to be there. She always seemed so happy to
be handing Evie back to me when it was time for her to go. It was almost as though she didn’t like the
feel of a child in her arms.
242 spin_glass.py Normal file
@@ -0,0 +1,242 @@
import numpy as np
from itertools import product
import re


class SpinGlassHamiltonian:
    def __init__(self, n_words=4, word_bits=8, text_file='sentences.txt', seed=None):
        """
        Initialize a fully connected spin glass Hamiltonian with J_ij couplings
        constructed from text patterns

        Args:
            n_words (int): Number of words per pattern
            word_bits (int): Number of bits to encode each word
            text_file (str): Path to file containing training sentences
            seed (int): Random seed for reproducibility
        """
        if seed is not None:
            np.random.seed(seed)

        self.n_words = n_words
        self.word_bits = word_bits
        self.n_spins = n_words * word_bits

        # Load text from file
        try:
            with open(text_file, 'r') as f:
                text = f.read()
        except FileNotFoundError:
            print(f"Warning: {text_file} not found. Using default text.")
            text = """
            The quantum spin glass model shows fascinating behavior
            in statistical physics and complex systems research
            many body interactions lead to frustration effects
            ground state properties reveal emergent phenomena
            """

        # Generate patterns from text
        self.patterns, self.words = self.text_to_patterns(text, n_words, word_bits)

        # Initialize J matrix for 2-point interactions
        self.J = np.zeros((self.n_spins, self.n_spins))

        # Construct J using patterns
        for pattern in self.patterns:
            flat_pattern = pattern.flatten()
            # 2-point interactions
            self.J += np.outer(flat_pattern, flat_pattern)

        # Normalize by number of patterns
        self.J = self.J / len(self.patterns)
        np.fill_diagonal(self.J, 0)  # No self-interactions

        print("Words used to construct patterns:")
        for i, words in enumerate(self.words):
            print(f"\nPattern {i} words: {words}")
            print(f"Pattern {i} configuration ({self.n_words}x{self.word_bits}):")
            print(self.patterns[i])

    def text_to_patterns(self, text, n_words, word_bits):
        """Convert text to patterns where each row encodes a full word"""
        words = re.findall(r'\b\w+\b', text.lower())
        patterns = []
        pattern_words = []

        # Shift one word at a time
        for i in range(len(words) - n_words + 1):
            word_group = words[i:i+n_words]
            pattern_words.append(word_group)

            # Create pattern (n_words x word_bits)
            pattern = np.zeros((n_words, word_bits))
            for row, word in enumerate(word_group):
                # Hash the word to a bit pattern (note: the character-sum
                # hash is not unique, e.g. anagrams collide)
                word_hash = sum(ord(c) for c in word)
                # Generate word_bits number of bits
                for col in range(word_bits):
                    bit_val = (word_hash >> col) & 1
                    pattern[row, col] = 1 if bit_val else -1

            patterns.append(pattern)

            if len(patterns) >= 5:  # Limit to 5 patterns
                break

        return np.array(patterns), pattern_words
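
    # Hash sketch (values chosen for illustration): the word hash is the sum
    # of character codes, mapped to +/-1 spins bit by bit. For word_bits=8:
    #
    #     word_hash = sum(ord(c) for c in "spin")  # 115+112+105+110 = 442
    #     [(word_hash >> col) & 1 for col in range(8)]
    #     # -> [0, 1, 0, 1, 1, 1, 0, 1]   (low bit first)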

    def calculate_energy(self, state):
        """
        Calculate energy using only 2-point interactions

        Args:
            state (numpy.array): Array of +1/-1 spins

        Returns:
            float: Energy of the configuration
        """
        # 2-point interaction energy
        energy = -0.5 * np.sum(self.J * np.outer(state, state))
        return energy
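
    # Energy convention (sketch): for spins s_i in {-1, +1} this is the
    # standard Ising form E = -(1/2) * sum_ij J_ij s_i s_j; with the Hebbian
    # J built in __init__, the stored text patterns sit in low-energy wells.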
|
||||||
|
|
||||||
|
def state_to_2d(self, state):
|
||||||
|
"""Convert 1D state array to 2D grid (n_words x word_bits)"""
|
||||||
|
return state.reshape(self.n_words, self.word_bits)
|
||||||
|
|
||||||
|
def state_to_words(self, state_2d):
|
||||||
|
"""Analyze a 2D state pattern to find closest matching words from patterns"""
|
||||||
|
closest_words = []
|
||||||
|
for row_idx, row in enumerate(state_2d):
|
||||||
|
# Find the pattern row that has the highest overlap with this state row
|
||||||
|
max_overlap = -1
|
||||||
|
best_word = None
|
||||||
|
|
||||||
|
for pattern_idx, pattern in enumerate(self.patterns):
|
||||||
|
for word_idx, pattern_row in enumerate(pattern):
|
||||||
|
overlap = abs(np.sum(row * pattern_row) / self.word_bits)
|
||||||
|
if overlap > max_overlap:
|
||||||
|
max_overlap = overlap
|
||||||
|
best_word = self.words[pattern_idx][word_idx]
|
||||||
|
|
||||||
|
closest_words.append(f"{best_word} (overlap: {max_overlap:.2f})")
|
||||||
|
return closest_words
|
||||||
|
|
||||||
|
def generate_states(n_spins):
|
||||||
|
for state in product([-1, 1], repeat=n_spins):
|
||||||
|
yield np.array(state)
|
||||||
|
|
||||||
|
def main():
    # Create a 24x16 spin glass system (24 words per pattern, 16 bits per word)
    n_words = 24
    word_bits = 16
    sg = SpinGlassHamiltonian(n_words=n_words, word_bits=word_bits, seed=42)

    # Initialize the best state with a random configuration
    current_state = np.random.choice([-1, 1], size=n_words * word_bits)
    current_energy = sg.calculate_energy(current_state)

    # Learn one word at a time
    for word_idx in range(n_words):
        print(f"\nOptimizing word {word_idx + 1}...")

        # Try all possibilities for the current word while keeping the others fixed
        best_energy = current_energy
        best_state = current_state.copy()

        # Generate all possibilities for one word (2**word_bits combinations)
        for word_state in product([-1, 1], repeat=word_bits):
            # Create a test state by replacing only the current word's bits
            test_state = current_state.copy()
            start_idx = word_idx * word_bits
            end_idx = start_idx + word_bits
            test_state[start_idx:end_idx] = word_state

            # Calculate energy
            energy = sg.calculate_energy(test_state)

            # Update if better
            if energy < best_energy:
                best_energy = energy
                best_state = test_state.copy()

        # Update the current state with the best found for this word
        current_state = best_state
        current_energy = best_energy

        # Show the intermediate result
        state_2d = sg.state_to_2d(current_state)
        print(f"Current energy: {current_energy:.4f}")
        print("Current state:")
        print(state_2d)
        words = sg.state_to_words(state_2d)
        print("Current words:")
        for i, word_info in enumerate(words):
            print(f"Word {i+1}: {word_info}")

    # Store the final result
    state_energies = [(current_state, current_energy)]

    print(f"\nSpin Glass System with {n_words}x{word_bits} lattice")
    print("\nOptimized state:")

    # Get the single optimized state
    state, energy = state_energies[0]
    state_2d = sg.state_to_2d(state)

    # Calculate the absolute overlap with each pattern
    overlaps = []
    for p, pattern in enumerate(sg.patterns):
        overlap = abs(np.sum(state_2d * pattern) / (n_words * word_bits))
        overlaps.append(f"P{p}: {overlap:.2f}")

    print(f"\nEnergy: {energy:.4f}")
    print(f"State configuration:\n{state_2d}")
    print("\nGenerated words:")
    closest_words = sg.state_to_words(state_2d)
    for row_idx, word_info in enumerate(closest_words):
        print(f"Word {row_idx + 1}: {word_info}")
    print(f"Absolute overlaps with patterns: {', '.join(overlaps)}")

    # Show which original sentence this state is most similar to
    best_pattern_idx = np.argmax([float(o.split(': ')[1]) for o in overlaps])
    print(f"Most similar to sentence: {' '.join(sg.words[best_pattern_idx])}")

    # Generate a 30-word sentence
    print("\nGenerating 30-word sentence:")
    # Start with all but the last word of the best matching pattern as context
    sentence = list(sg.words[best_pattern_idx][:-1])

    # Generate remaining words until we reach 30
    while len(sentence) < 30:
        # Create a state from the current context
        context_state = np.zeros((n_words, word_bits))
        for i, word in enumerate(sentence[-n_words+1:]):
            word_hash = sum(ord(c) for c in word)
            for col in range(word_bits):
                bit_val = (word_hash >> col) & 1
                context_state[i, col] = 1 if bit_val else -1

        # Optimize the next word (last row of the state)
        best_energy = float('inf')
        best_word_state = None

        # Try all possibilities for the next word
        for word_state in product([-1, 1], repeat=word_bits):
            test_state = context_state.copy()
            test_state[-1] = word_state

            # Calculate energy
            energy = sg.calculate_energy(test_state.flatten())

            if energy < best_energy:
                best_energy = energy
                best_word_state = test_state.copy()

        # Get the predicted word
        words = sg.state_to_words(best_word_state)
        next_word = words[-1].split(" (")[0]  # Just the word, not the overlap info
        sentence.append(next_word)

    print(" ".join(sentence))

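# Illustration (added): a minimal sketch of the word encoding used in the
# generation loop above. A word is hashed by summing its character codes, and
# the low-order word_bits bits are mapped to +/-1 spins. Note that distinct
# words can collide under this hash.
def _demo_word_hash_encoding():
    word = "cat"
    word_hash = sum(ord(c) for c in word)  # 99 + 97 + 116 = 312
    bits = [1 if (word_hash >> col) & 1 else -1 for col in range(8)]
    print(bits)  # -> [-1, -1, -1, 1, 1, 1, -1, -1] (low bits of 0b100111000)
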
if __name__ == "__main__":
|
||||||
|
main()
|
||||||
185
spin_glass_gpu.py
Normal file
@@ -0,0 +1,185 @@
import jax
import jax.numpy as jnp
from functools import partial
from load_tinystories import (
    tokenize_with_punctuation,
    load_encodings,
    BiDict
)
from tqdm import tqdm

@partial(jax.jit, static_argnums=(1, 2))
def calculate_weights(sequences, M, N):
    """
    Calculate the weight matrix using JAX for GPU acceleration.
    M and N are marked static so jit can specialize on the sequence shape.
    """
    # Convert sequences to a JAX array of shape (num_sequences, M*N, 1)
    sequences_jax = jnp.array(sequences)

    # Hebbian-style rule: sum the outer products s s^T over all sequences
    W = jnp.sum(jnp.matmul(sequences_jax, sequences_jax.transpose(0, 2, 1)), axis=0)

    # Normalize by the number of stored sequences
    W = W / len(sequences)
    return W

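# Illustration (added): a tiny check of the Hebbian-style rule above, which
# averages the outer products s s^T over the stored sequences. The values are
# hypothetical +/-1 toy data, not the 0/1 encodings used elsewhere in this file.
def _demo_calculate_weights():
    seqs = jnp.array([[[1.], [-1.]], [[-1.], [1.]]])  # 2 sequences of 2 bits
    print(calculate_weights(seqs, 1, 2))  # -> [[ 1. -1.] [-1.  1.]]
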
@jax.jit
def calculate_energies(complete_sequences, W, temperature):
    """
    Calculate energies for all candidate completions using JAX.
    Each candidate s has energy E = -1/2 * s^T W s.
    """
    # Flatten each (M*N, 1) candidate to a row vector and evaluate the
    # quadratic form for the whole batch at once
    seqs = complete_sequences.reshape(complete_sequences.shape[0], -1)
    energies = -0.5 * jnp.einsum('ki,ij,kj->k', seqs, W, seqs)

    # Normalize energies to [0, 1]
    energies = energies - jnp.min(energies)
    energies = energies / (jnp.max(energies) + 1e-10)

    # Boltzmann probabilities at the given temperature
    probabilities = jnp.exp(-energies / temperature)
    probabilities = probabilities / jnp.sum(probabilities)

    return energies, probabilities

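# Illustration (added): with the quadratic form above, a candidate aligned with
# the stored pattern gets the lowest normalized energy and hence the highest
# Boltzmann probability. Toy +/-1 values, assumed for illustration only.
def _demo_calculate_energies():
    W = jnp.array([[1., -1.], [-1., 1.]])                 # stores the pattern [1, -1]
    candidates = jnp.array([[[1.], [-1.]], [[1.], [1.]]])
    energies, probs = calculate_energies(candidates, W, 1.0)
    print(energies)  # -> [0. 1.] after normalization (aligned candidate lowest)
    print(probs)     # -> roughly [0.73 0.27]
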
def get_word_sequences_gpu(encoded_stories, M=100, N=12):
    """
    Get all sequences of M consecutive words, optimized for GPU.
    Each word is an N-bit string; each window is flattened to an (M*N, 1) vector.
    """
    sequences = []

    for story in tqdm(encoded_stories, desc="Generating sequences"):
        if len(story) >= M:
            for i in range(len(story) - M + 1):
                word_group = story[i:i + M]
                bits = []
                for word in word_group:
                    bits.extend([int(bit) for bit in word])
                vector = jnp.array(bits).reshape(M * N, 1)
                sequences.append(vector)

    return jnp.array(sequences)

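# Illustration (added): a story of L words yields L - M + 1 overlapping windows,
# each flattened to an (M*N, 1) bit vector. The 2-bit encodings below are
# hypothetical stand-ins for the real N-bit vocabulary.
def _demo_get_word_sequences():
    story = ["01", "10", "11"]  # 3 words, N=2 bits each
    seqs = get_word_sequences_gpu([story], M=2, N=2)
    print(seqs.shape)  # -> (2, 4, 1): two windows of M*N = 4 bits
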
def retrieve_sequences_gpu(sequences, partial_sequence, vocab, M=100, N=12, temperature=1.0):
    """
    GPU-accelerated version of sequence retrieval using JAX.
    """
    # Get all possible word encodings
    possible_words = list(vocab.values())

    # Calculate the weight matrix
    W = calculate_weights(sequences, M, N)

    # Create complete sequences for all candidate words, remembering which
    # candidates survive the length check so indices stay aligned with the
    # probabilities computed below
    valid_words = []
    complete_sequences = []
    for word in possible_words:
        complete_sequence = partial_sequence + word
        if len(complete_sequence) == M * N:
            complete_vec = jnp.array([int(bit) for bit in complete_sequence]).reshape(M * N, 1)
            complete_sequences.append(complete_vec)
            valid_words.append(word)

    complete_sequences = jnp.array(complete_sequences)

    # Calculate energies and probabilities
    energies, probabilities = calculate_energies(complete_sequences, W, temperature)

    # Sample a word according to the Boltzmann probabilities.
    # Note: the fixed PRNGKey makes this sampling deterministic across calls.
    selected_idx = int(jax.random.choice(
        jax.random.PRNGKey(0),
        len(valid_words),
        p=probabilities
    ))

    best_word = valid_words[selected_idx]
    min_energy = float(energies[selected_idx])

    # Map the chosen encoding back to its word via the bidirectional vocab
    for word, vector in vocab.items():
        if vector == best_word:
            return word, best_word, min_energy

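# Note (added): retrieve_sequences_gpu samples with a fixed jax.random.PRNGKey(0),
# so repeated calls draw identically. A common JAX pattern is to thread a key
# through the caller and split it per step, as in this hedged sketch:
def _demo_key_threading():
    key = jax.random.PRNGKey(42)
    for _ in range(3):
        key, subkey = jax.random.split(key)
        idx = jax.random.choice(subkey, 5, p=jnp.full(5, 0.2))
        print(int(idx))  # a different draw each iteration
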
def predict_sequence_gpu(initial_sequence, vocab, sequences, D=10, M=100, N=12, temperature=1.0):
    """
    GPU-accelerated version of sequence prediction: predicts D words, sliding
    the context window forward by one word after each prediction.
    """
    current_tokens = initial_sequence.copy()
    predictions = []
    energies = []

    for _ in tqdm(range(D), desc="Predicting words"):
        # Concatenate the bit encodings of the current context window
        partial_sequence = ""
        for token in current_tokens:
            partial_sequence += vocab[token]

        predicted_word, _, energy = retrieve_sequences_gpu(
            sequences,
            partial_sequence,
            vocab,
            M=M,
            N=N,
            temperature=temperature
        )

        predictions.append(predicted_word)
        energies.append(energy)

        # Slide the window: drop the oldest token, append the prediction
        current_tokens = current_tokens[1:] + [predicted_word]

    return predictions, energies

def main():
    # Load saved encodings
    vocab, encoded_stories, original_stories = load_encodings()
    if vocab is None:
        print("No saved encodings found. Please run load_tinystories.py first.")
        return

    # Constants
    M = 100            # context window length in words
    N = 13             # bits per word encoding
    D = 10             # number of words to predict
    temperature = 1.0

    print("Loading training sequences...")
    sequences = get_word_sequences_gpu(encoded_stories=encoded_stories, M=M, N=N)
    print(f"Loaded {len(sequences)} training sequences")

    # Get a sentence from the user
    print("\nEnter your story:")
    sentence = input("Enter a sentence (at least 99 words): ")
    initial_tokens = tokenize_with_punctuation(sentence)

    if len(initial_tokens) < M - 1:
        print(f"Sentence too short. Got {len(initial_tokens)} tokens, need {M-1}.")
        return

    # Predict the next words
    print("\nPredicting next words...")
    predicted_words, energies = predict_sequence_gpu(
        initial_tokens[:M-1],
        vocab,
        sequences,
        D=D,
        M=M,
        N=N,
        temperature=temperature
    )

    # Print results
    print("\nYour input ended with:")
    print(" ".join(initial_tokens[-10:]))
    print("\nPredicted continuation:")
    print(" ".join(predicted_words))
    print("\nEnergies of predictions:")
    for i, (word, energy) in enumerate(zip(predicted_words, energies)):
        print(f"Word {i+1} ('{word}'): {energy:.4f}")


if __name__ == "__main__":
    main()