From 908351dd77e8a703fb55b32a209c2fca4f551669 Mon Sep 17 00:00:00 2001
From: =?utf8?q?Fran=C3=A7ois=20Fleuret?=
Date: Sat, 22 Jun 2024 23:51:18 +0200
Subject: [PATCH] Update.

---
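Notes (kept below the "---" so `git am` leaves them out of the commit
message):

* Prune the color palette from 26 to 11 entries; ten of the dropped
  colors stay at the end of the list as comments.
* Add a new generate() that places nb_birds three-cell "birds" on an
  empty grid, moves them for nb_iterations steps with bouncing at the
  walls, and emits each (start, end) frame pair flattened into one
  sequence: either start + token_forward + end or end + token_backward
  + start, picked at random. The previous implementation is kept as
  generate_().
* Swap the arrowhead orientation of the forward and backward direction
  glyphs drawn by sample2img().

A minimal usage sketch (not part of the patch; it assumes world.py's
module-level token_forward/token_backward and the generate() signature
added below):

    import world

    # 8 sequences on a 6x8 grid, 3 birds, 2 simulation steps
    seq = world.generate(nb=8, height=6, width=8, nb_birds=3, nb_iterations=2)
    # seq is a LongTensor of shape (8, 6 * 8 + 1 + 6 * 8): one flattened
    # frame, a direction token, then the other flattened frame
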
 world.py | 141 ++++++++++++++++++++++++++++++++++++++++++++++---------
 1 file changed, 120 insertions(+), 21 deletions(-)

diff --git a/world.py b/world.py
index 05d7505..68f46de 100755
--- a/world.py
+++ b/world.py
@@ -18,31 +18,26 @@ from torch.nn import functional as F
 colors = torch.tensor(
     [
         [255, 255, 255],
-        [0, 0, 255],
+        [255, 20, 147],
         [0, 0, 255],
         [0, 192, 0],
-        [0, 255, 0],
-        [0, 255, 127],
-        [0, 255, 255],
         [0, 255, 255],
-        [30, 144, 255],
-        [64, 224, 208],
-        [65, 105, 225],
-        [75, 0, 130],
-        [106, 90, 205],
-        [128, 0, 128],
-        [135, 206, 235],
         [192, 192, 192],
-        [220, 20, 60],
-        [250, 128, 114],
+        [106, 90, 205],
         [255, 0, 0],
-        [255, 0, 255],
-        [255, 105, 180],
-        [255, 127, 80],
-        [255, 165, 0],
-        [255, 182, 193],
-        [255, 20, 147],
+        [220, 20, 60],
+        [65, 105, 225],
         [255, 200, 0],
+        # [255, 182, 193],
+        # [75, 0, 130],
+        # [128, 0, 128],
+        # [30, 144, 255],
+        # [135, 206, 235],
+        # [0, 255, 0],
+        # [64, 224, 208],
+        # [250, 128, 114],
+        # [255, 165, 0],
+        # [0, 255, 255],
     ]
 )
 
@@ -56,6 +51,110 @@ token2char = "_" + "".join([chr(ord("A") + n) for n in range(len(colors) - 1)])
 
 
 def generate(
+    nb,
+    height,
+    width,
+    nb_birds=3,
+    nb_iterations=1,
+):
+    pairs = []
+
+    for _ in tqdm.tqdm(range(nb), dynamic_ncols=True, desc="world generation"):
+        f_start = torch.zeros(height, width, dtype=torch.int64)
+
+        i, j, vi, vj = (
+            torch.empty(nb_birds, dtype=torch.int64),
+            torch.empty(nb_birds, dtype=torch.int64),
+            torch.empty(nb_birds, dtype=torch.int64),
+            torch.empty(nb_birds, dtype=torch.int64),
+        )
+
+        col = torch.randperm(colors.size(0) - 1)[:nb_birds].sort().values + 1
+
+        for n in range(nb_birds):
+            c = col[n]
+
+            while True:
+                i[n], j[n] = (
+                    torch.randint(height, (1,))[0],
+                    torch.randint(width, (1,))[0],
+                )
+                vm = torch.randint(4, (1,))[0]
+                vi[n], vj[n] = (vm % 2) * 2 - 1, (vm // 2) * 2 - 1
+                if (
+                    i[n] - vi[n] >= 0
+                    and i[n] - vi[n] < height
+                    and j[n] - vj[n] >= 0
+                    and j[n] - vj[n] < width
+                    and f_start[i[n], j[n]] == 0
+                    and f_start[i[n] - vi[n], j[n]] == 0
+                    and f_start[i[n], j[n] - vj[n]] == 0
+                ):
+                    break
+
+            f_start[i[n], j[n]] = c
+            f_start[i[n] - vi[n], j[n]] = c
+            f_start[i[n], j[n] - vj[n]] = c
+
+        f_end = f_start.clone()
+
+        for l in range(nb_iterations):
+            for n in range(nb_birds):
+                c = col[n]
+                f_end[i[n], j[n]] = 0
+                f_end[i[n] - vi[n], j[n]] = 0
+                f_end[i[n], j[n] - vj[n]] = 0
+
+                pi, pj, pvi, pvj = i[n].item(), j[n].item(), vi[n].item(), vj[n].item()
+
+                assert (
+                    f_end[i[n], j[n]] == 0
+                    and f_end[i[n] - vi[n], j[n]] == 0
+                    and f_end[i[n], j[n] - vj[n]] == 0
+                )
+
+                if (i[n] == 0 and vi[n] == -1) or (i[n] == height - 1 and vi[n] == 1):
+                    vi[n] = -vi[n]
+                if (j[n] == 0 and vj[n] == -1) or (j[n] == width - 1 and vj[n] == 1):
+                    vj[n] = -vj[n]
+
+                i[n] += vi[n]
+                j[n] += vj[n]
+
+                if not (
+                    f_end[i[n], j[n]] == 0
+                    and f_end[i[n] - vi[n], j[n]] == 0
+                    and f_end[i[n], j[n] - vj[n]] == 0
+                ):
+                    i[n], j[n], vi[n], vj[n] = pi, pj, pvi, pvj
+
+                f_end[i[n], j[n]] = c
+                f_end[i[n] - vi[n], j[n]] = c
+                f_end[i[n], j[n] - vj[n]] = c
+
+        pairs.append((f_start, f_end))
+
+    result = []
+    for p in pairs:
+        if torch.rand(1) < 0.5:
+            result.append(
+                torch.cat(
+                    [p[0].flatten(), torch.tensor([token_forward]), p[1].flatten()],
+                    dim=0,
+                )[None, :]
+            )
+        else:
+            result.append(
+                torch.cat(
+                    [p[1].flatten(), torch.tensor([token_backward]), p[0].flatten()],
+                    dim=0,
+                )[None, :]
+            )
+
+    return torch.cat(result, dim=0)
+
+
+def generate_(
     nb,
     height,
     width,
@@ -159,7 +258,7 @@ def sample2img(seq, height, width, upscale=15):
                     n,
                     :,
                     (height * upscale) // 2 - upscale // 2 + k,
-                    3 + abs(k - upscale // 2),
+                    3 + upscale // 2 - abs(k - upscale // 2),
                 ] = 0
         elif direction[n] == token_backward:
             for k in range(upscale):
@@ -167,7 +266,7 @@ def sample2img(seq, height, width, upscale=15):
                     n,
                     :,
                     (height * upscale) // 2 - upscale // 2 + k,
-                    3 + upscale // 2 - abs(k - upscale // 2),
+                    3 + abs(k - upscale // 2),
                 ] = 0
         else:
             for k in range(2, upscale - 2):
-- 
2.39.5