Getting the local vertex positions in the vertex shader

Is it possible that the vertex positions given in the struct appdata_base are already transformed? I need the local positions of my vertices in the vertex shader, but after 30 minutes of confusion and testing I noticed that the vertex positions are already in world space, and all the matrices are changed to compensate for this; e.g. _Object2World seems to be the identity matrix. I can’t find any documentation about this.

How do I get the actual local vertex positions and the real world/model matrix? Is there any documentation?

Example shader:

Shader "own/markercell"
{
	Properties 
	{
		_MaskTex ("_MaskTex", 2D) = "white" {}
		_MainColor ("_MainColor", Color) = (1,1,1,1)
		_NeighbourPos("_NeighbourPos", Vector) = (1,1,1,1)

	}
    SubShader
	{
		Lighting Off 
        Fog { Mode Off }
        ZWrite Off
		Blend SrcAlpha OneMinusSrcAlpha	  


		Tags { "Queue"="Transparent" "RenderType"="Transparent" }
        Pass
		{
            CGPROGRAM

            #pragma vertex vert
            #pragma fragment frag

            #include "UnityCG.cginc"
			
			sampler2D _MaskTex;
			uniform fixed4 _MainColor;
			uniform float _Scale;
			uniform float4 _NeighbourPos;

			struct vertOut 
			{
                float4 pos : SV_POSITION;
                float2 tex : TEXCOORD0;
                float3 lpos : TEXCOORD2;
            };
			

            vertOut vert(appdata_full input)
			{
                vertOut output;

                
				// use float precision for positions; fixed4 would lose precision
				float4 pos = input.vertex;

				output.lpos = input.vertex.xyz; // object-space position, passed through to the fragment shader
                output.pos = mul (UNITY_MATRIX_MVP, pos);
                output.tex = input.texcoord;
                return output;
            }

            fixed4 frag(vertOut input) : COLOR0
			{
				fixed4 output = fixed4(0,0,0,0);
				output.a = 1;
				output.rgb = input.lpos; // visualize the interpolated position as a color (negative components clamp to black)

				return output;
            }

            ENDCG
        }
    }
}

The shader should simply show the local position as a color, but it shows the world position.
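For illustration, a minimal debug sketch using the same built-ins as the shader above (assuming the old-style _Object2World name): with dynamic batching active, both lines produce identical values, because the incoming vertex data is already in world space and _Object2World has been reset to the identity.

	// inside vert(), for debugging only
	float3 localPos = input.vertex.xyz;                     // expected: object space
	float3 worldPos = mul(_Object2World, input.vertex).xyz; // world space
	// with batching on, localPos == worldPos for batched objects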

OK, so if I’m right, the solution is actually simpler than I first thought. Unity is using dynamic batching: it combines your objects’ meshes to reduce draw calls, but because that combining happens before the data is sent to the GPU (which is the whole point), the vertex positions seen by the shader have already been transformed into world space.

The solution is simply to disable dynamic batching in Edit → Project Settings → Player.
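If you only want to exempt this one shader rather than disabling batching project-wide, newer Unity versions also support a SubShader tag that opts the shader out of batching (a per-shader alternative; check that your Unity version supports it):

	Tags { "Queue"="Transparent" "RenderType"="Transparent" "DisableBatching"="True" }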

I have a similar issue. I’m trying to write a shader that draws random white pixels on a solid black plane, using local coordinates of the plane to generate random values.
This is what I’ve written:

Shader "Custom/stars_shader"
{
	SubShader
	{
		Pass 
		{
			CGPROGRAM
	        #pragma fragment frag
	        #pragma vertex vert
	        #include "UnityCG.cginc"
			
	        struct fragmentInput
	  		{
	  			float4 m_position : TEXCOORD2;
				float4 position : SV_POSITION;
	  		};

	        fragmentInput vert(appdata_base i)
	        {
	        	fragmentInput o;
	        	o.position = mul (UNITY_MATRIX_MVP, i.vertex);
	        	o.m_position = i.vertex; // pass the object-space position through to the fragment shader
	        	return o;
	        }
	  		
	  		// classic pseudo-random hash: maps a 3D coordinate to a value in [0, 1)
	  		float rand(float3 co)
			{
				return frac(sin(dot(co.xyz, float3(12.9898,78.233,45.5432))) * 43758.5453);
			}

	    	float4 frag(fragmentInput i) : COLOR
	    	{
	        	float4 color;
	        	if(rand(i.m_position.xyz) < 0.995f)
	        		color = float4(0, 0, 0, 1);
	        	else
	        		color  = float4(1, 1, 1, 1);
	        	return color;
	    	}
	        
	        ENDCG
		}
	} 
	FallBack "Diffuse"
}

But the results are quite weird. As the plane and/or the camera moves around, the white pixels change position chaotically, which makes me think the random values are re-generated on every movement.
I have disabled dynamic batching, and the scene contains only the camera and the plane.
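That behaviour follows from how the hash is evaluated: rand() runs per fragment on the continuously interpolated m_position, and every camera or plane movement shifts the exact point on the plane that each pixel samples, so the hash input (and therefore its output) changes every frame. A minimal sketch of a fix is to snap the local position to a fixed grid before hashing, so every sample inside a cell produces the same value (the grid density 512.0 here is an arbitrary assumption; tune to taste):

	float4 frag(fragmentInput i) : COLOR
	{
		// quantize the interpolated position so the hash input is stable
		// across frames; 512.0 controls how fine the star grid is (assumed value)
		float3 cell = floor(i.m_position.xyz * 512.0);
		if (rand(cell) < 0.995f)
			return float4(0, 0, 0, 1); // black background
		return float4(1, 1, 1, 1);     // star pixel
	}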