# The code below is not original; it is an adaptation and study of Ken Perlin's improved noise algorithm.
# I focus on implementing the algorithm using only numpy for the math/randomness.

import numpy as np
def generar_tabla_de_permutacion(size=256):
    tabla = np.arange(size, dtype=int)
    np.random.shuffle(tabla)
    return np.concatenate([tabla, tabla])
# I generate a table containing a permutation of the indices 0 through 255 inclusive;
# its purpose is to supply the hash values that select the gradients at the corners of the grid cells.
tabla_de_permutacion = generar_tabla_de_permutacion()
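# Quick check (an addition, not part of the original script): the doubled table has length 512
# and its second half repeats the first, so the nested lookups in perlin() can never index
# past the end of the array.
assert len(tabla_de_permutacion) == 512
assert (tabla_de_permutacion[:256] == tabla_de_permutacion[256:]).all()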
def lerp(a, b, x):
    return a + x * (b - a)
# lerp is a linear interpolation function that blends two values,
# creating a smoother transition controlled by x.
def fade(t):
    return t * t * t * (t * (t * 6 - 15) + 10)
# fade smooths the transition between grid values with the quintic curve 6t^5 - 15t^4 + 10t^3.
# It complements the linear interpolation (lerp) so the result does not look
# abrupt or "blocky".
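# A quick sanity check (added, not part of the original script): fade keeps the endpoints fixed,
# fade(0) = 0 and fade(1) = 1, and its derivative 30*t^2*(t - 1)^2 vanishes at both ends,
# which is what removes visible seams at the cell boundaries.
assert fade(0.0) == 0.0 and fade(1.0) == 1.0
assert fade(0.5) == 0.5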
def grad_original(hash, x, y):
    h = hash & 3
    u = x if h < 2 else y
    v = y if h < 2 else x
    return (u if h & 1 == 0 else -u) + (v if h & 2 == 0 else -v)
#
# This grad, like lerp and fade above, follows the function Ken Perlin used in the original
# algorithm (where it is even more involved); it uses comparisons and bit operations
# to obtain the dot product more efficiently, without multiplication.
def grad_simplificada(hash, x, y):
    h = hash & 3
    if h == 0:
        grad_vector = (1.0, 1.0)    # up and to the right
    elif h == 1:
        grad_vector = (-1.0, 1.0)   # up and to the left
    elif h == 2:
        grad_vector = (-1.0, -1.0)  # down and to the left
    else:
        grad_vector = (1.0, -1.0)   # down and to the right
    # And we compute the dot product
    producto_escalar = grad_vector[0] * x + grad_vector[1] * y
    return producto_escalar
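# Small illustration (an addition, not part of the original): each value of hash & 3 picks one
# diagonal gradient, and the return value is simply its dot product with the offset (x, y).
# With the offset (0.25, 0.5) the four cases give 0.75, 0.25, -0.75 and -0.25:
for h, esperado in enumerate([0.75, 0.25, -0.75, -0.25]):
    assert grad_simplificada(h, 0.25, 0.5) == esperado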
def perlin(x, y):
    xi = int(x) & 255
    yi = int(y) & 255
    # We take the integer parts of x and y as the cell coordinates xi and yi;
    # the & 255 (the largest 8-bit number) keeps them inside the table's 0-255 range.
    xf = x - int(x)
    yf = y - int(y)
    # Positions of x and y relative to the corner of their cell in the grid.
    # The result is a value between 0 and 0.99...
    u = fade(xf)
    v = fade(yf)
    #
    aa = tabla_de_permutacion[tabla_de_permutacion[xi] + yi]          # bottom left
    ab = tabla_de_permutacion[tabla_de_permutacion[xi] + yi + 1]      # top left
    ba = tabla_de_permutacion[tabla_de_permutacion[xi + 1] + yi]      # bottom right
    bb = tabla_de_permutacion[tabla_de_permutacion[xi + 1] + yi + 1]  # top right
    # We read the corner hashes from the permutation table; the double lookup (the table is
    # applied twice) raises the "entropy" or "chaos" so the hashes look as random as possible.
    x1 = lerp(grad_simplificada(aa, xf, yf), grad_simplificada(ba, xf - 1, yf), u)
    x2 = lerp(grad_simplificada(ab, xf, yf - 1), grad_simplificada(bb, xf - 1, yf - 1), u)
    return lerp(x1, x2, v)
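# Quick check (an addition, not part of the original script): the noise is exactly 0 at every
# integer lattice point, because there xf = yf = 0 and u = v = 0; away from the lattice the
# values fall roughly within [-1, 1], which is the range the emoji thresholds below assume.
assert perlin(3.0, 7.0) == 0.0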
def generar_ruido_de_perlin(ancho, largo, escala):
    ruido = np.zeros((ancho, largo))
    for i in range(ancho):
        for j in range(largo):
            ruido[i][j] = perlin(i / escala, j / escala)
    return ruido
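# Usage sketch (an addition for illustration): the result is an (ancho, largo) array, and a
# larger escala stretches each grid cell over more samples, giving smoother terrain.
ruido_prueba = generar_ruido_de_perlin(4, 4, 2)
assert ruido_prueba.shape == (4, 4)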
# Create the Perlin noise matrix
ancho, largo, escala = 50, 50, 10
ruido = generar_ruido_de_perlin(ancho, largo, escala)
# Map noise values to emojis
def noise_to_emoji(noise_value):
    if noise_value < -0.1:
        return '🌊'
    elif noise_value < 0.1:
        return '🏖️'
    elif noise_value < 0.5:
        return '🌳'
    else:
        return '⛰️'
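# Small check (an addition, not part of the original): the thresholds split the roughly [-1, 1]
# noise range into water (< -0.1), beach (< 0.1), forest (< 0.5) and mountain bands.
assert noise_to_emoji(-0.5) == '🌊'
assert noise_to_emoji(0.3) == '🌳'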
mapa_de_emojis = []
for i in range(ancho):
    emoji = ""
    for j in range(largo):
        emoji += noise_to_emoji(ruido[i][j])
    mapa_de_emojis.append(emoji)
# Print the emoji artwork
for emoji in mapa_de_emojis:
    print(emoji)
# https://docs.google.com/presentation/d/1VhjQvxGNvaNiTwyvcd8l2RZDykwxZKnnIKjMfUWbVng/edit?usp=sharing
#
# # 'One result'
# 🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🏖️🏖️🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳
# 🌊🌊🌊🏖️🏖️🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳
# 🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳
# 🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳
# 🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️
# 🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️
# 🌳🌳🌳⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🏖️🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️
# 🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳⛰️⛰️🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# ⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌳⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️
# 🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️
# 🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🏖️🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🌳🏖️🏖️🏖️🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🌊🏖️🏖️🏖️🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🌊🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊
# 🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️
# 🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️
# 🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳
# 🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳
# 🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳
# 🌳🌳⛰️⛰️🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳
# ⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️
# 🌳🌳🌳🌳🌳🌳🌳⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️
# 🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️
# 🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️
# 🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️
# 🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🏖️🏖️🏖️🌊🌊🏖️🏖️🌳🌳🌳🌳⛰️🌳🌳🌳🌳🏖️🏖️🌊🌊🏖️🏖️🏖️🌳🌳🏖️
# 🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳🌳
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️🌳🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️🌳
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🏖️🌳🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌊🌊🌊🏖️🏖️🌳🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️⛰️
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️⛰️⛰️🌳
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🌳🌳🌳🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳⛰️⛰️⛰️⛰️⛰️🌳🌳
# 🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳🌳🌳🌳🏖️🌊🌊🌊🌊🌊🏖️🏖️🏖️🏖️🏖️🏖️🌊🌊🌊🌊🌊🌊🌊🌊🏖️🌳🌳🌳🌳⛰️⛰️⛰️🌳🌳🌳