YAML Metadata Warning: empty or missing YAML metadata in repo card

Check out the documentation for more information.

# Standard-library imports first, third-party after (PEP 8); the pasted
# source had all five imports fused onto one line, which is a SyntaxError.
import hashlib
import os
from copy import deepcopy

import matplotlib.pyplot as plt
import numpy as np

class SymbolicMemory:
    """Key/value store of grids keyed by shape + value histogram, with an entropy trace.

    Keys are content hashes, so two grids with the same shape and the same
    multiset of cell values map to the same entry (by design — arrangement
    is ignored). Every entropy() call is appended to `history` so plot()
    can render the full trace.
    """

    def __init__(self):
        # The pasted source had `def init`; Python requires the dunder name
        # for the constructor to run.
        self.memory = {}    # sha256 hex key -> stored value
        self.history = []   # entropy values in call order, consumed by plot()

    def encode(self, grid):
        """Return a deterministic hex key for `grid` from its shape and histogram.

        NOTE(review): assumes cell values are non-negative ints, typically in
        [0, 10) (bincount minlength=10) — values >= 10 still work but widen
        the histogram. Confirm against callers.
        """
        flat = grid.flatten()
        key = hashlib.sha256(
            str((grid.shape, tuple(np.bincount(flat, minlength=10)))).encode()
        ).hexdigest()
        return key

    def store(self, key, value):
        """Associate `value` with `key`, overwriting any previous entry."""
        self.memory[key] = value

    def entropy(self, grid):
        """Return the Shannon entropy (bits) of the grid's value distribution.

        Also appends the value to `self.history` as a side effect.
        """
        flat = grid.flatten()
        _, counts = np.unique(flat, return_counts=True)
        probs = counts / counts.sum()
        entropy = -np.sum(probs * np.log2(probs))
        self.history.append(entropy)
        return entropy

    def plot(self, task_id):
        """Save the recorded entropy trace to entropy_graphs/entropy_<task_id>.png."""
        os.makedirs("entropy_graphs", exist_ok=True)
        plt.plot(self.history)
        plt.title(f"ZEVE Entropy: {task_id}")
        plt.xlabel("Step")
        plt.ylabel("Entropy")
        plt.savefig(f"entropy_graphs/entropy_{task_id}.png")
        plt.clf()

class IRACOETSolver:
    """Greedy entropy-minimisation solver over simple grid transforms."""

    def __init__(self):
        # The pasted source had `def init`; Python requires the dunder name
        # for the constructor to run.
        self.memory = SymbolicMemory()

    def solve_task(self, task):
        """Memorise the training pairs, then solve each test input.

        `task` is an ARC-style dict: 'train' and 'test' are lists of
        {'input': grid, ...} pairs; train pairs also carry 'output'.
        Returns one solved grid (nested lists) per test pair.

        NOTE(review): the stored train outputs are never read back by
        solve() — the memory is currently write-only. Confirm intent.
        """
        for pair in task['train']:
            key = self.memory.encode(np.array(pair['input']))
            self.memory.store(key, pair['output'])

        results = []
        for pair in task['test']:
            result = self.solve(np.array(pair['input']), task.get('id', 'unknown'))
            results.append(result)
        return results

    def solve(self, grid, task_id):
        """Greedily apply flips/rotations/rolls while entropy strictly decreases.

        Runs at most 9 improvement steps, stops at the first step with no
        strict improvement, saves the entropy-trace plot for `task_id`, and
        returns the final grid as nested lists.

        NOTE(review): every candidate transform (flip/rot90/roll) preserves
        the value histogram, so entropy() of a candidate always equals the
        current entropy and the loop breaks on its first iteration — the
        input grid is returned unchanged. Likely not the intended search.
        """
        current = deepcopy(grid)
        best = self.memory.entropy(current)
        for _ in range(9):
            options = [
                np.fliplr(current), np.flipud(current),
                np.rot90(current), np.roll(current, 1, 0), np.roll(current, 1, 1)
            ]
            scored = [(g, self.memory.entropy(g)) for g in options]
            candidate, score = min(scored, key=lambda x: x[1])
            if score < best:
                current, best = candidate, score
            else:
                break
        self.memory.plot(task_id)
        return current.tolist()
Downloads last month

-

Downloads are not tracked for this model. How to track
Inference Providers NEW
This model isn't deployed by any Inference Provider. 🙋 Ask for provider support