The “New” Image of the City

Sritoma Bhattacharjee
5 min readOct 23, 2019

--

Sritoma Bhattacharjee and Shailee Kothari

Much of how we view the city today is dictated by social media. Instagram has become the ubiquitous image generator, continuously filtering the views of the city we hold today. The new algorithm brings "most liked" pictures to the top of consumers' feeds, reinforcing the image. It leads us to wonder: how much of the way we move through the city is influenced by the instagramability (that's a word now?) of a space?

The opening of Heatherwick's Vessel at Hudson Yards pops up again and again on insta-feeds attracting flocks of people interested in clicking more pictures and putting them on insta-feeds thus popping up again and again on insta-feeds attracting flocks of people interested in clicking more pictures and putting them on insta-feeds thus popping up again and again on insta-feeds attracting flocks of people interested in clicking more pictures and putting them on insta-feeds thus popping up again and again on insta-feeds attracting flocks of people interested in clicking more pictures and putting them on insta-feeds thus popping up again and again on insta-feeds attracting flocks of people interested in clicking more pictures and putting them on insta-feeds.

How can real estate developers and city planners use this to influence the way we build in a city? Could we possibly find:

  • recurrent spatial qualities that they can use to attract people?
  • chart where to place new developments if people’s paths are dictated by instagramability?
  • ways to collect data/deploy security that chart drone paths based on instagram activity to cover maximum people by covering minimum ground?
  • ways for people concerned with data privacy to have a plug in on their phone that allows them to map paths that avoid intersection with the data rich paths?

The isovist vision of type-A agents, originating from different parts of the NYU campus, guides them along the most instagrammable paths of the city. Data-privacy-conscious type-B agents, on the other hand, take alternate routes.

view range of 60deg

The light blue spots represent the most instagrammable locations in the city. The agents can be observed gravitating towards paths that pass the insta-locations, provided those locations fall within their view range.

view range of 90deg
// View casting by the agent, building a visibility mesh each frame.
// Ray casts detect targets (the most-instagrammed locations).
//
// FIX: the pasted listing contained typographic quotes (“View Mesh”) and
// em-dashes (—) in place of the minus operator, so it could not compile.
// Restored plain ASCII operators/quotes and conventional formatting;
// logic is otherwise unchanged.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

/// <summary>
/// Computes an agent's field of view: collects visible targets on
/// <see cref="targetMask"/> and draws the visible region as a fan-shaped
/// mesh, clipped by obstacles on <see cref="obstacleMask"/>.
/// </summary>
public class FieldOfView : MonoBehaviour
{
    // Maximum distance the agent can see.
    public float viewRadius;

    // Total view cone angle in degrees (e.g. 60 or 90 in the article's runs).
    [Range(0, 360)]
    public float viewAngle;

    // Layers containing targets (insta-locations) and view-blocking obstacles.
    public LayerMask targetMask;
    public LayerMask obstacleMask;

    // Targets currently in view; refreshed periodically by FindVisibleTargets.
    [HideInInspector]
    public List<Transform> visibleTargets = new List<Transform>();

    // Rays cast per degree of view angle when building the mesh.
    public float meshResolution;

    // Binary-search iterations used to refine obstacle edges.
    public int edgeResolveIterations;

    // Distance jump between adjacent rays that counts as a separate edge.
    public float edgeDstThreshold;

    // Small forward offset applied to mesh vertices (mask cutaway).
    public float maskCutawayDst = .1f;

    public MeshFilter viewMeshFilter;
    Mesh viewMesh;

    void Start()
    {
        viewMesh = new Mesh();
        viewMesh.name = "View Mesh";
        viewMeshFilter.mesh = viewMesh;

        // Target detection runs on a timer (5x/sec), not every frame.
        StartCoroutine("FindTargetsWithDelay", .2f);
    }

    // Polls for visible targets every <paramref name="delay"/> seconds.
    IEnumerator FindTargetsWithDelay(float delay)
    {
        while (true)
        {
            yield return new WaitForSeconds(delay);
            FindVisibleTargets();
        }
    }

    // LateUpdate so the mesh is drawn after the agent has moved this frame.
    void LateUpdate()
    {
        DrawFieldOfView();
    }

    // Repopulates visibleTargets with targets inside the view cone that are
    // not occluded by an obstacle.
    void FindVisibleTargets()
    {
        visibleTargets.Clear();
        Collider[] targetsInViewRadius = Physics.OverlapSphere(transform.position, viewRadius, targetMask);

        for (int i = 0; i < targetsInViewRadius.Length; i++)
        {
            Transform target = targetsInViewRadius[i].transform;
            Vector3 dirToTarget = (target.position - transform.position).normalized;

            // Inside the cone: angle from forward is less than half the view angle.
            if (Vector3.Angle(transform.forward, dirToTarget) < viewAngle / 2)
            {
                float dstToTarget = Vector3.Distance(transform.position, target.position);

                // Visible only when no obstacle sits between agent and target.
                if (!Physics.Raycast(transform.position, dirToTarget, dstToTarget, obstacleMask))
                {
                    visibleTargets.Add(target);
                }
            }
        }
    }

    // Sweeps rays across the view cone and triangulates the hit points into
    // a fan mesh rooted at the agent.
    void DrawFieldOfView()
    {
        int stepCount = Mathf.RoundToInt(viewAngle * meshResolution);
        float stepAngleSize = viewAngle / stepCount;
        List<Vector3> viewPoints = new List<Vector3>();
        ViewCastInfo oldViewCast = new ViewCastInfo();

        for (int i = 0; i <= stepCount; i++)
        {
            float angle = transform.eulerAngles.y - viewAngle / 2 + stepAngleSize * i;
            ViewCastInfo newViewCast = ViewCast(angle);

            if (i > 0)
            {
                bool edgeDstThresholdExceeded = Mathf.Abs(oldViewCast.dst - newViewCast.dst) > edgeDstThreshold;

                // An edge lies between the two rays when one hit and the other
                // missed, or both hit at very different distances.
                if (oldViewCast.hit != newViewCast.hit || (oldViewCast.hit && newViewCast.hit && edgeDstThresholdExceeded))
                {
                    EdgeInfo edge = FindEdge(oldViewCast, newViewCast);
                    if (edge.pointA != Vector3.zero)
                    {
                        viewPoints.Add(edge.pointA);
                    }
                    if (edge.pointB != Vector3.zero)
                    {
                        viewPoints.Add(edge.pointB);
                    }
                }
            }

            viewPoints.Add(newViewCast.point);
            oldViewCast = newViewCast;
        }

        // Vertex 0 is the agent's own (local-space) position; the rest are
        // the swept view points, connected as a triangle fan.
        int vertexCount = viewPoints.Count + 1;
        Vector3[] vertices = new Vector3[vertexCount];
        int[] triangles = new int[(vertexCount - 2) * 3];

        vertices[0] = Vector3.zero;
        for (int i = 0; i < vertexCount - 1; i++)
        {
            vertices[i + 1] = transform.InverseTransformPoint(viewPoints[i]) + Vector3.forward * maskCutawayDst;

            if (i < vertexCount - 2)
            {
                triangles[i * 3] = 0;
                triangles[i * 3 + 1] = i + 1;
                triangles[i * 3 + 2] = i + 2;
            }
        }

        viewMesh.Clear();
        viewMesh.vertices = vertices;
        viewMesh.triangles = triangles;
        viewMesh.RecalculateNormals();
    }

    // Binary-searches the angular interval between two casts to locate the
    // obstacle edge more precisely. Returns the min/max points found.
    EdgeInfo FindEdge(ViewCastInfo minViewCast, ViewCastInfo maxViewCast)
    {
        float minAngle = minViewCast.angle;
        float maxAngle = maxViewCast.angle;
        Vector3 minPoint = Vector3.zero;
        Vector3 maxPoint = Vector3.zero;

        for (int i = 0; i < edgeResolveIterations; i++)
        {
            float angle = (minAngle + maxAngle) / 2;
            ViewCastInfo newViewCast = ViewCast(angle);

            bool edgeDstThresholdExceeded = Mathf.Abs(minViewCast.dst - newViewCast.dst) > edgeDstThreshold;
            if (newViewCast.hit == minViewCast.hit && !edgeDstThresholdExceeded)
            {
                minAngle = angle;
                minPoint = newViewCast.point;
            }
            else
            {
                maxAngle = angle;
                maxPoint = newViewCast.point;
            }
        }

        return new EdgeInfo(minPoint, maxPoint);
    }

    // Casts a single ray at the given global angle; on a miss, the point is
    // clamped to the view radius.
    ViewCastInfo ViewCast(float globalAngle)
    {
        Vector3 dir = DirFromAngle(globalAngle, true);
        RaycastHit hit;

        if (Physics.Raycast(transform.position, dir, out hit, viewRadius, obstacleMask))
        {
            return new ViewCastInfo(true, hit.point, hit.distance, globalAngle);
        }
        else
        {
            return new ViewCastInfo(false, transform.position + dir * viewRadius, viewRadius, globalAngle);
        }
    }

    // Converts an angle (degrees, in the XZ plane) to a unit direction.
    // When angleIsGlobal is false the angle is relative to the agent's yaw.
    public Vector3 DirFromAngle(float angleInDegrees, bool angleIsGlobal)
    {
        if (!angleIsGlobal)
        {
            angleInDegrees += transform.eulerAngles.y;
        }
        return new Vector3(Mathf.Sin(angleInDegrees * Mathf.Deg2Rad), 0, Mathf.Cos(angleInDegrees * Mathf.Deg2Rad));
    }

    // Result of a single view-cast: whether it hit, where, how far, at what angle.
    public struct ViewCastInfo
    {
        public bool hit;
        public Vector3 point;
        public float dst;
        public float angle;

        public ViewCastInfo(bool _hit, Vector3 _point, float _dst, float _angle)
        {
            hit = _hit;
            point = _point;
            dst = _dst;
            angle = _angle;
        }
    }

    // Pair of points bracketing a resolved obstacle edge.
    public struct EdgeInfo
    {
        public Vector3 pointA;
        public Vector3 pointB;

        public EdgeInfo(Vector3 _pointA, Vector3 _pointB)
        {
            pointA = _pointA;
            pointB = _pointB;
        }
    }
}
// Rules for agent movement: the agent moves to the point where the ray cast
// hits the target location within its field of view.
using UnityEngine;
using UnityEngine.AI;

/// <summary>
/// Drives a NavMeshAgent toward whatever scene point the camera's pointer
/// ray hits: each frame, a ray is cast from <see cref="cam"/> through the
/// current mouse position and the agent is sent to the hit point.
/// </summary>
public class PlayerController : MonoBehaviour
{
    // Camera used to convert the screen-space pointer into a world-space ray.
    public Camera cam;

    // Agent that path-finds to the clicked/hovered destination.
    public NavMeshAgent agent;

    void Update()
    {
        // FIX: the original called cam.ScreenPointToRay(Input.RaycastHit),
        // but Input.RaycastHit does not exist in the Unity API; the ray must
        // originate from the pointer's screen position.
        Ray ray = cam.ScreenPointToRay(Input.mousePosition);
        RaycastHit hit;

        // NOTE(review): destination updates every frame while the ray hits
        // geometry (no click gate) — presumably intentional for the
        // simulation; confirm if click-to-move was meant instead.
        if (Physics.Raycast(ray, out hit))
        {
            agent.SetDestination(hit.point);
        }
    }
}

--

--